VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstTwoByte0f.cpp.h @ 100856

Last change on this file since 100856 was 100856, checked in by vboxsync, 19 months ago:

VMM/IEM: Made use of the IEM_MC_F_XXX flags to limit the number of threaded functions generated. Added some more flags for completeness. Fixed a bug wrt selecting the pre-386 IP updating function variations. bugref:10369
/* $Id: IEMAllInstTwoByte0f.cpp.h 100856 2023-08-11 09:47:22Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 */
FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
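
/* Editor's illustration, not part of the original file: an opcode handler
   typically just forwards to a common worker like the one above, passing the
   matching assembly-level helper. A hypothetical handler for a PADDB-style
   MMX instruction could look roughly like this, assuming iemAImpl_paddb_u64
   has the PFNIEMAIMPLMEDIAF2U64 signature:

       FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
       {
           IEMOP_MNEMONIC2(RM, PADDB, paddb, Pq, Qq, DISOPTYPE_HARMLESS, 0);
           return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddb_u64);
       }

   This keeps the per-instruction handlers down to decoding concerns while
   the operand fetch/store plumbing lives in one place. */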


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 *
 * Unlike iemOpCommonMmx_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
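
/* Editor's note, a sketch not taken from the original file: the difference
   between the two worker flavours is whether the helper receives the
   FPU/FXSAVE state. The two function pointer types are assumed to look
   roughly like:

       typedef void (*PFNIEMAIMPLMEDIAF2U64)(PCX86FXSTATE pFpuState,
                                             uint64_t *puDst, uint64_t const *puSrc);
       typedef void (*PFNIEMAIMPLMEDIAOPTF2U64)(uint64_t *puDst, uint64_t const *puSrc);

   which is why the first flavour is invoked via IEM_MC_CALL_MMX_AIMPL_2 above
   while the "Opt" flavour goes through IEM_MC_CALL_VOID_AIMPL_2. */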


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 *
 * Unlike iemOpCommonMmxSse_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE2.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full_Sse2, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * SSE cpuid checks. No SIMD FP exceptions.
 *
 * @sa iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
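
/* Editor's illustration, not part of the original file: the 66h-prefixed XMM
   form of an instruction is wired up the same way as the MMX form, just with
   the 128-bit worker and helper. A hypothetical handler could look roughly
   like this, assuming iemAImpl_paddb_u128 matches PFNIEMAIMPLMEDIAF2U128:

       FNIEMOP_DEF(iemOp_paddb_Vx_Wx)
       {
           IEMOP_MNEMONIC2(RM, PADDB, paddb, Vx, Wx, DISOPTYPE_HARMLESS, 0);
           return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddb_u128);
       }
*/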


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * Unlike iemOpCommonSse2_FullFull_To_Full, the @a pfnU128 worker function takes
 * no FXSAVE state, just the operands.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxxx   mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem32].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U32_ZX_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
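
/* Editor's worked example (not from the original file): a LowLow operation
   such as punpcklbw mm1, mm2/mem32 only consumes the low halves of the two
   operands, which is why the memory form above does a zero-extending 32-bit
   fetch. With dst = 0x????????44332211 and the source's low dword equal to
   0x88776655, interleaving the low bytes yields

       dst = 0x8844773366225511

   i.e. bytes (low to high) 11 55 22 66 33 77 44 88. */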


/**
 * Common worker for SSE instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned, 64-bit or 128-bit memory access for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned, 64-bit or 128-bit memory access for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxxx   mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* intel docs this to be full 64-bit read */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE means a
 * 128-bit aligned access that may read the full 128 bits or only the upper
 * 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions of the form:
 *      pxxs    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
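
/* Editor's illustration, not part of the original file: unlike the integer
   workers, the FP workers return their value through an IEMSSERESULT (result
   plus MXCSR flags) so pending SIMD FP exceptions can be raised after the
   helper has run, via IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT() above.
   A hypothetical ADDPS handler could be wired up roughly like:

       FNIEMOP_DEF(iemOp_addps_Vps_Wps)
       {
           IEMOP_MNEMONIC2(RM, ADDPS, addps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
           return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
       }

   assuming iemAImpl_addps_u128 has the PFNIEMAIMPLFPSSEF2U128 signature. */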


/**
 * Common worker for SSE instructions of the form:
 *      pxxs    xmm1, xmm2/mem32
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullR32_To_Full, PFNIEMAIMPLFPSSEF2U128R32, pfnU128_R32)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM32.
         */
        IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT32U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R32_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem32].
         */
        IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(RTFLOAT32U, r32Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src2, r32Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R32(r32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pr32Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxd    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSseFp_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxs    xmm1, xmm2/mem64
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullR64_To_Full, PFNIEMAIMPLFPSSEF2U128R64, pfnU128_R64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT64U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R64_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem64].
         */
        IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(RTFLOAT64U, r64Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src2, r64Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R64(r64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pr64Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE means a
 * 128-bit aligned access that may read the full 128 bits or only the upper
 * 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE3 instructions of the form:
 *      hxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE3 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse3Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_sldt_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_286);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_str_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_286);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_286);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1, IEM_MC_F_MIN_286);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_286);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1, IEM_MC_F_MIN_286);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
}


/** Common worker for group 6 verr (0x0f 0x00 /4) and verw (0x0f 0x00 /5). */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_286);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1, IEM_MC_F_MIN_286);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[IEM_GET_MODRM_REG_8(bRm)], bRm);
}
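
/* Editor's worked example (not from the original file): the dispatcher above
   indexes the jump table with the reg field (bits 5:3) of the ModR/M byte.
   For bRm = 0xC8 (mod = 11b, reg = 001b, rm = 000b) IEM_GET_MODRM_REG_8
   yields 1, so the encoding 0F 00 C8 ("str ax") lands in iemOp_Grp6_str with
   bRm = 0xC8, and the handler then uses the rm field to pick the register. */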


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1, IEM_MC_F_MIN_286);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
             want all hypercalls regardless of instruction used, and if a
             hypercall isn't handled by GIM or HMSvm, an #UD will be raised.
             (NEM/win makes ASSUMPTIONS about this behavior.) */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, iemCImpl_vmcall);
}


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
    IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
                                iemCImpl_vmlaunch);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_MNEMONIC(vmresume, "vmresume");
    IEMOP_HLP_IN_VMX_OPERATION("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
                                iemCImpl_vmresume);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_MNEMONIC(vmxoff, "vmxoff");
    IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_vmxoff);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1, IEM_MC_F_MIN_286);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT, iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
/** @todo testcase: test prefixes and exceptions. currently not checking for the
 *        OPSIZE one ... */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_xgetbv);
    }
    IEMOP_RAISE_INVALID_OPCODE_RET();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
/** @todo testcase: test prefixes and exceptions. currently not checking for the
 *        OPSIZE one ... */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_xsetbv);
    }
    IEMOP_RAISE_INVALID_OPCODE_RET();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT, iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 0xd8. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
                                iemCImpl_vmrun);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
#endif

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    /** @todo r=bird: Table A-8 on page 524 in vol 3 has VMGEXIT for this
     *        opcode sequence when F3 or F2 is used as prefix. So, the assumption
     *        here cannot be right... */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
             want all hypercalls regardless of instruction used, and if a
             hypercall isn't handled by GIM or HMSvm, an #UD will be raised.
             (NEM/win makes ASSUMPTIONS about this behavior.) */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_vmmcall);
}

/** Opcode 0x0f 0x01 0xda. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_vmload);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
#endif


/** Opcode 0x0f 0x01 0xdb. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_vmsave);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
#endif


/** Opcode 0x0f 0x01 0xdc. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_stgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
#endif


/** Opcode 0x0f 0x01 0xdd. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_clgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
#endif


/** Opcode 0x0f 0x01 0xdf. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_invlpga);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif


/** Opcode 0x0f 0x01 0xde. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_skinit);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif


/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(smsw, "smsw");
    IEMOP_HLP_MIN_286();
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_smsw_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_286);
    IEM_MC_ARG(uint16_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
1701
1702
1703/** Opcode 0x0f 0x01 /6. */
1704FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
1705{
1706 /* The operand size is effectively ignored, all is 16-bit and only the
1707 four low-order bits (PE, MP, EM and TS) are used. */
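 /* Example: 'lmsw ax' with AX=0x0001 sets CR0.PE (beginning the switch to
    protected mode); LMSW can set PE but never clear it. */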
1708 IEMOP_MNEMONIC(lmsw, "lmsw");
1709 IEMOP_HLP_MIN_286();
1710 if (IEM_IS_MODRM_REG_MODE(bRm))
1711 {
1712 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_286);
1713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1714 IEM_MC_ARG(uint16_t, u16Tmp, 0);
1715 IEM_MC_ARG_CONST(RTGCPTR, GCPtrEffDst, NIL_RTGCPTR, 1);
1716 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
1717 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
1718 IEM_MC_END();
1719 }
1720 else
1721 {
1722 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_286);
1723 IEM_MC_ARG(uint16_t, u16Tmp, 0);
1724 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
1725 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1726 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1727 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
1728 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
1729 IEM_MC_END();
1730 }
1731}
1732
1733
1734/** Opcode 0x0f 0x01 /7. */
1735FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
1736{
1737 IEMOP_MNEMONIC(invlpg, "invlpg");
1738 IEMOP_HLP_MIN_486();
1739 IEM_MC_BEGIN(1, 1, IEM_MC_F_MIN_386);
1740 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
1741 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1742 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1743 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, iemCImpl_invlpg, GCPtrEffDst);
1744 IEM_MC_END();
1745}
1746
1747
1748/** Opcode 0x0f 0x01 0xf8. */
1749FNIEMOP_DEF(iemOp_Grp7_swapgs)
1750{
1751 IEMOP_MNEMONIC(swapgs, "swapgs");
1752 IEMOP_HLP_ONLY_64BIT();
1753 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1754 IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_swapgs);
1755}
1756
1757
1758/** Opcode 0x0f 0x01 0xf9. */
1759FNIEMOP_DEF(iemOp_Grp7_rdtscp)
1760{
1761 IEMOP_MNEMONIC(rdtscp, "rdtscp");
1762 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1763 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_rdtscp);
1764}
1765
1766
1767/**
1768 * Group 7 jump table, memory variant.
1769 */
1770IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
1771{
1772 iemOp_Grp7_sgdt,
1773 iemOp_Grp7_sidt,
1774 iemOp_Grp7_lgdt,
1775 iemOp_Grp7_lidt,
1776 iemOp_Grp7_smsw,
1777 iemOp_InvalidWithRM,
1778 iemOp_Grp7_lmsw,
1779 iemOp_Grp7_invlpg
1780};
1781
1782
1783/** Opcode 0x0f 0x01. */
1784FNIEMOP_DEF(iemOp_Grp7)
1785{
1786 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1787 if (IEM_IS_MODRM_MEM_MODE(bRm))
1788 return FNIEMOP_CALL_1(g_apfnGroup7Mem[IEM_GET_MODRM_REG_8(bRm)], bRm);
1789
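 /* Register (mod=11) forms: the rm field sub-selects the individual
    instruction within each /r group below. */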
1790 switch (IEM_GET_MODRM_REG_8(bRm))
1791 {
1792 case 0:
1793 switch (IEM_GET_MODRM_RM_8(bRm))
1794 {
1795 case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
1796 case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
1797 case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
1798 case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
1799 }
1800 IEMOP_RAISE_INVALID_OPCODE_RET();
1801
1802 case 1:
1803 switch (IEM_GET_MODRM_RM_8(bRm))
1804 {
1805 case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
1806 case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
1807 }
1808 IEMOP_RAISE_INVALID_OPCODE_RET();
1809
1810 case 2:
1811 switch (IEM_GET_MODRM_RM_8(bRm))
1812 {
1813 case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
1814 case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
1815 }
1816 IEMOP_RAISE_INVALID_OPCODE_RET();
1817
1818 case 3:
1819 switch (IEM_GET_MODRM_RM_8(bRm))
1820 {
1821 case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
1822 case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
1823 case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
1824 case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
1825 case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
1826 case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
1827 case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
1828 case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
1829 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1830 }
1831
1832 case 4:
1833 return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);
1834
1835 case 5:
1836 IEMOP_RAISE_INVALID_OPCODE_RET();
1837
1838 case 6:
1839 return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);
1840
1841 case 7:
1842 switch (IEM_GET_MODRM_RM_8(bRm))
1843 {
1844 case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
1845 case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
1846 }
1847 IEMOP_RAISE_INVALID_OPCODE_RET();
1848
1849 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1850 }
1851}
1852
1853/** Common worker for LAR and LSL (opcodes 0x0f 0x02 and 0x0f 0x03). */
1854FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
1855{
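 /* LAR reads the access-rights bytes of the descriptor selected by the
    source operand and LSL its segment limit; both set ZF on success and
    leave the destination unchanged with ZF cleared on failure. */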
1856 IEMOP_HLP_NO_REAL_OR_V86_MODE();
1857 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1858
1859 if (IEM_IS_MODRM_REG_MODE(bRm))
1860 {
1861 switch (pVCpu->iem.s.enmEffOpSize)
1862 {
1863 case IEMMODE_16BIT:
1864 IEM_MC_BEGIN(3, 0, 0);
1865 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1866 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1867 IEM_MC_ARG(uint16_t, u16Sel, 1);
1868 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1869
1870 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1871 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1872 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1873
1874 IEM_MC_END();
1875 break;
1876
1877 case IEMMODE_32BIT:
1878 case IEMMODE_64BIT:
1879 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386);
1880 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1881 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1882 IEM_MC_ARG(uint16_t, u16Sel, 1);
1883 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1884
1885 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1886 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1887 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1888
1889 IEM_MC_END();
1890 break;
1891
1892 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1893 }
1894 }
1895 else
1896 {
1897 switch (pVCpu->iem.s.enmEffOpSize)
1898 {
1899 case IEMMODE_16BIT:
1900 IEM_MC_BEGIN(3, 1, 0);
1901 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1902 IEM_MC_ARG(uint16_t, u16Sel, 1);
1903 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1904 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1905
1906 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1907 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1908
1909 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1910 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1911 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1912
1913 IEM_MC_END();
1914 break;
1915
1916 case IEMMODE_32BIT:
1917 case IEMMODE_64BIT:
1918 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386);
1919 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1920 IEM_MC_ARG(uint16_t, u16Sel, 1);
1921 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1922 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1923
1924 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1925 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1926/** @todo testcase: make sure it's a 16-bit read. */
1927
1928 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1929 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1930 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1931
1932 IEM_MC_END();
1933 break;
1934
1935 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1936 }
1937 }
1938}
1939
1940
1941
1942/** Opcode 0x0f 0x02. */
1943FNIEMOP_DEF(iemOp_lar_Gv_Ew)
1944{
1945 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
1946 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
1947}
1948
1949
1950/** Opcode 0x0f 0x03. */
1951FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
1952{
1953 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
1954 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
1955}
1956
1957
1958/** Opcode 0x0f 0x05. */
1959FNIEMOP_DEF(iemOp_syscall)
1960{
1961 IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
1962 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1963 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
1964 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB,
1965 iemCImpl_syscall);
1966}
1967
1968
1969/** Opcode 0x0f 0x06. */
1970FNIEMOP_DEF(iemOp_clts)
1971{
1972 IEMOP_MNEMONIC(clts, "clts");
1973 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1974 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_clts);
1975}
1976
1977
1978/** Opcode 0x0f 0x07. */
1979FNIEMOP_DEF(iemOp_sysret)
1980{
1981 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
1982 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1983 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
1984 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB,
1985 iemCImpl_sysret, pVCpu->iem.s.enmEffOpSize);
1986}
1987
1988
1989/** Opcode 0x0f 0x08. */
1990FNIEMOP_DEF(iemOp_invd)
1991{
1992 IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
1993 IEMOP_HLP_MIN_486();
1994 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1995 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_invd);
1996}
1997
1998
1999/** Opcode 0x0f 0x09. */
2000FNIEMOP_DEF(iemOp_wbinvd)
2001{
2002 IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
2003 IEMOP_HLP_MIN_486();
2004 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2005 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_wbinvd);
2006}
2007
2008
2009/** Opcode 0x0f 0x0b. */
2010FNIEMOP_DEF(iemOp_ud2)
2011{
2012 IEMOP_MNEMONIC(ud2, "ud2");
2013 IEMOP_RAISE_INVALID_OPCODE_RET();
2014}
2015
2016/** Opcode 0x0f 0x0d. */
2017FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
2018{
2019 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
2020 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLongMode && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
2021 {
2022 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
2023 IEMOP_RAISE_INVALID_OPCODE_RET();
2024 }
2025
2026 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2027 if (IEM_IS_MODRM_REG_MODE(bRm))
2028 {
2029 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
2030 IEMOP_RAISE_INVALID_OPCODE_RET();
2031 }
2032
2033 switch (IEM_GET_MODRM_REG_8(bRm))
2034 {
2035 case 2: /* Aliased to /0 for the time being. */
2036 case 4: /* Aliased to /0 for the time being. */
2037 case 5: /* Aliased to /0 for the time being. */
2038 case 6: /* Aliased to /0 for the time being. */
2039 case 7: /* Aliased to /0 for the time being. */
2040 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
2041 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
2042 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
2043 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2044 }
2045
2046 IEM_MC_BEGIN(0, 1, 0);
2047 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2048 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2049 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2050 /* Currently a NOP. */
2051 NOREF(GCPtrEffSrc);
2052 IEM_MC_ADVANCE_RIP_AND_FINISH();
2053 IEM_MC_END();
2054}
2055
2056
2057/** Opcode 0x0f 0x0e. */
2058FNIEMOP_DEF(iemOp_femms)
2059{
2060 IEMOP_MNEMONIC(femms, "femms");
2061
2062 IEM_MC_BEGIN(0, 0, 0);
2063 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2064 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
2065 IEM_MC_MAYBE_RAISE_FPU_XCPT();
2066 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2067 IEM_MC_FPU_FROM_MMX_MODE();
2068 IEM_MC_ADVANCE_RIP_AND_FINISH();
2069 IEM_MC_END();
2070}
2071
2072
2073/** Opcode 0x0f 0x0f. */
2074FNIEMOP_DEF(iemOp_3Dnow)
2075{
2076 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
2077 {
2078 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
2079 IEMOP_RAISE_INVALID_OPCODE_RET();
2080 }
2081
2082#ifdef IEM_WITH_3DNOW
2083 /* This is pretty sparse, use switch instead of table. */
2084 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2085 return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
2086#else
2087 IEMOP_BITCH_ABOUT_STUB();
2088 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2089#endif
2090}
2091
2092
2093/**
2094 * @opcode 0x10
2095 * @oppfx none
2096 * @opcpuid sse
2097 * @opgroup og_sse_simdfp_datamove
2098 * @opxcpttype 4UA
2099 * @optest op1=1 op2=2 -> op1=2
2100 * @optest op1=0 op2=-22 -> op1=-22
2101 */
2102FNIEMOP_DEF(iemOp_movups_Vps_Wps)
2103{
2104 IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
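 /* Unaligned variant: the memory form below uses an unaligned 128-bit
    fetch, so unlike movaps no alignment check is made. */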
2105 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2106 if (IEM_IS_MODRM_REG_MODE(bRm))
2107 {
2108 /*
2109 * XMM128, XMM128.
2110 */
2111 IEM_MC_BEGIN(0, 0, 0);
2112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2113 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2114 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2115 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2116 IEM_GET_MODRM_RM(pVCpu, bRm));
2117 IEM_MC_ADVANCE_RIP_AND_FINISH();
2118 IEM_MC_END();
2119 }
2120 else
2121 {
2122 /*
2123 * XMM128, [mem128].
2124 */
2125 IEM_MC_BEGIN(0, 2, 0);
2126 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2127 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2128
2129 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2130 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2131 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2132 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2133
2134 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2135 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2136
2137 IEM_MC_ADVANCE_RIP_AND_FINISH();
2138 IEM_MC_END();
2139 }
2140
2141}
2142
2143
2144/**
2145 * @opcode 0x10
2146 * @oppfx 0x66
2147 * @opcpuid sse2
2148 * @opgroup og_sse2_pcksclr_datamove
2149 * @opxcpttype 4UA
2150 * @optest op1=1 op2=2 -> op1=2
2151 * @optest op1=0 op2=-42 -> op1=-42
2152 */
2153FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
2154{
2155 IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2156 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2157 if (IEM_IS_MODRM_REG_MODE(bRm))
2158 {
2159 /*
2160 * XMM128, XMM128.
2161 */
2162 IEM_MC_BEGIN(0, 0, 0);
2163 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2164 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2165 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2166 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2167 IEM_GET_MODRM_RM(pVCpu, bRm));
2168 IEM_MC_ADVANCE_RIP_AND_FINISH();
2169 IEM_MC_END();
2170 }
2171 else
2172 {
2173 /*
2174 * XMM128, [mem128].
2175 */
2176 IEM_MC_BEGIN(0, 2, 0);
2177 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2178 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2179
2180 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2181 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2182 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2183 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2184
2185 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2186 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2187
2188 IEM_MC_ADVANCE_RIP_AND_FINISH();
2189 IEM_MC_END();
2190 }
2191}
2192
2193
2194/**
2195 * @opcode 0x10
2196 * @oppfx 0xf3
2197 * @opcpuid sse
2198 * @opgroup og_sse_simdfp_datamove
2199 * @opxcpttype 5
2200 * @optest op1=1 op2=2 -> op1=2
2201 * @optest op1=0 op2=-22 -> op1=-22
2202 */
2203FNIEMOP_DEF(iemOp_movss_Vss_Wss)
2204{
2205 IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
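 /* Note: the register form merges (only the low dword is copied, bits
    127:32 of the destination are preserved), while the memory form
    zero-extends the loaded dword to the full 128 bits. */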
2206 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2207 if (IEM_IS_MODRM_REG_MODE(bRm))
2208 {
2209 /*
2210 * XMM32, XMM32.
2211 */
2212 IEM_MC_BEGIN(0, 1, 0);
2213 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2214 IEM_MC_LOCAL(uint32_t, uSrc);
2215
2216 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2217 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2218 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/ );
2219 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, uSrc);
2220
2221 IEM_MC_ADVANCE_RIP_AND_FINISH();
2222 IEM_MC_END();
2223 }
2224 else
2225 {
2226 /*
2227 * XMM128, [mem32].
2228 */
2229 IEM_MC_BEGIN(0, 2, 0);
2230 IEM_MC_LOCAL(uint32_t, uSrc);
2231 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2232
2233 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2234 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2235 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2236 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2237
2238 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2239 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2240
2241 IEM_MC_ADVANCE_RIP_AND_FINISH();
2242 IEM_MC_END();
2243 }
2244}
2245
2246
2247/**
2248 * @opcode 0x10
2249 * @oppfx 0xf2
2250 * @opcpuid sse2
2251 * @opgroup og_sse2_pcksclr_datamove
2252 * @opxcpttype 5
2253 * @optest op1=1 op2=2 -> op1=2
2254 * @optest op1=0 op2=-42 -> op1=-42
2255 */
2256FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
2257{
2258 IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
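 /* Like movss: the register form merges the low qword, while the memory
    form zero-extends it to the full 128 bits. */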
2259 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2260 if (IEM_IS_MODRM_REG_MODE(bRm))
2261 {
2262 /*
2263 * XMM64, XMM64.
2264 */
2265 IEM_MC_BEGIN(0, 1, 0);
2266 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2267 IEM_MC_LOCAL(uint64_t, uSrc);
2268
2269 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2270 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2271 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2272 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2273
2274 IEM_MC_ADVANCE_RIP_AND_FINISH();
2275 IEM_MC_END();
2276 }
2277 else
2278 {
2279 /*
2280 * XMM128, [mem64].
2281 */
2282 IEM_MC_BEGIN(0, 2, 0);
2283 IEM_MC_LOCAL(uint64_t, uSrc);
2284 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2285
2286 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2287 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2288 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2289 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2290
2291 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2292 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2293
2294 IEM_MC_ADVANCE_RIP_AND_FINISH();
2295 IEM_MC_END();
2296 }
2297}
2298
2299
2300/**
2301 * @opcode 0x11
2302 * @oppfx none
2303 * @opcpuid sse
2304 * @opgroup og_sse_simdfp_datamove
2305 * @opxcpttype 4UA
2306 * @optest op1=1 op2=2 -> op1=2
2307 * @optest op1=0 op2=-42 -> op1=-42
2308 */
2309FNIEMOP_DEF(iemOp_movups_Wps_Vps)
2310{
2311 IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2312 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2313 if (IEM_IS_MODRM_REG_MODE(bRm))
2314 {
2315 /*
2316 * XMM128, XMM128.
2317 */
2318 IEM_MC_BEGIN(0, 0, 0);
2319 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2320 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2321 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2322 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2323 IEM_GET_MODRM_REG(pVCpu, bRm));
2324 IEM_MC_ADVANCE_RIP_AND_FINISH();
2325 IEM_MC_END();
2326 }
2327 else
2328 {
2329 /*
2330 * [mem128], XMM128.
2331 */
2332 IEM_MC_BEGIN(0, 2, 0);
2333 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2334 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2335
2336 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2337 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2338 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2339 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2340
2341 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2342 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2343
2344 IEM_MC_ADVANCE_RIP_AND_FINISH();
2345 IEM_MC_END();
2346 }
2347}
2348
2349
2350/**
2351 * @opcode 0x11
2352 * @oppfx 0x66
2353 * @opcpuid sse2
2354 * @opgroup og_sse2_pcksclr_datamove
2355 * @opxcpttype 4UA
2356 * @optest op1=1 op2=2 -> op1=2
2357 * @optest op1=0 op2=-42 -> op1=-42
2358 */
2359FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
2360{
2361 IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2362 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2363 if (IEM_IS_MODRM_REG_MODE(bRm))
2364 {
2365 /*
2366 * XMM128, XMM128.
2367 */
2368 IEM_MC_BEGIN(0, 0, 0);
2369 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2370 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2371 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2372 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2373 IEM_GET_MODRM_REG(pVCpu, bRm));
2374 IEM_MC_ADVANCE_RIP_AND_FINISH();
2375 IEM_MC_END();
2376 }
2377 else
2378 {
2379 /*
2380 * [mem128], XMM128.
2381 */
2382 IEM_MC_BEGIN(0, 2, 0);
2383 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2384 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2385
2386 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2387 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2388 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2389 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2390
2391 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2392 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2393
2394 IEM_MC_ADVANCE_RIP_AND_FINISH();
2395 IEM_MC_END();
2396 }
2397}
2398
2399
2400/**
2401 * @opcode 0x11
2402 * @oppfx 0xf3
2403 * @opcpuid sse
2404 * @opgroup og_sse_simdfp_datamove
2405 * @opxcpttype 5
2406 * @optest op1=1 op2=2 -> op1=2
2407 * @optest op1=0 op2=-22 -> op1=-22
2408 */
2409FNIEMOP_DEF(iemOp_movss_Wss_Vss)
2410{
2411 IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2412 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2413 if (IEM_IS_MODRM_REG_MODE(bRm))
2414 {
2415 /*
2416 * XMM32, XMM32.
2417 */
2418 IEM_MC_BEGIN(0, 1, 0);
2419 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2420 IEM_MC_LOCAL(uint32_t, uSrc);
2421
2422 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2423 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2424 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
2425 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/, uSrc);
2426
2427 IEM_MC_ADVANCE_RIP_AND_FINISH();
2428 IEM_MC_END();
2429 }
2430 else
2431 {
2432 /*
2433 * [mem32], XMM32.
2434 */
2435 IEM_MC_BEGIN(0, 2, 0);
2436 IEM_MC_LOCAL(uint32_t, uSrc);
2437 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2438
2439 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2440 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2441 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2442 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2443
2444 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
2445 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2446
2447 IEM_MC_ADVANCE_RIP_AND_FINISH();
2448 IEM_MC_END();
2449 }
2450}
2451
2452
2453/**
2454 * @opcode 0x11
2455 * @oppfx 0xf2
2456 * @opcpuid sse2
2457 * @opgroup og_sse2_pcksclr_datamove
2458 * @opxcpttype 5
2459 * @optest op1=1 op2=2 -> op1=2
2460 * @optest op1=0 op2=-42 -> op1=-42
2461 */
2462FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
2463{
2464 IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2465 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2466 if (IEM_IS_MODRM_REG_MODE(bRm))
2467 {
2468 /*
2469 * XMM64, XMM64.
2470 */
2471 IEM_MC_BEGIN(0, 1, 0);
2472 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2473 IEM_MC_LOCAL(uint64_t, uSrc);
2474
2475 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2476 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2477 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2478 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2479
2480 IEM_MC_ADVANCE_RIP_AND_FINISH();
2481 IEM_MC_END();
2482 }
2483 else
2484 {
2485 /*
2486 * [mem64], XMM64.
2487 */
2488 IEM_MC_BEGIN(0, 2, 0);
2489 IEM_MC_LOCAL(uint64_t, uSrc);
2490 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2491
2492 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2493 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2494 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2495 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2496
2497 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2498 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2499
2500 IEM_MC_ADVANCE_RIP_AND_FINISH();
2501 IEM_MC_END();
2502 }
2503}
2504
2505
2506FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
2507{
2508 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2509 if (IEM_IS_MODRM_REG_MODE(bRm))
2510 {
2511 /**
2512 * @opcode 0x12
2513 * @opcodesub 11 mr/reg
2514 * @oppfx none
2515 * @opcpuid sse
2516 * @opgroup og_sse_simdfp_datamove
2517 * @opxcpttype 5
2518 * @optest op1=1 op2=2 -> op1=2
2519 * @optest op1=0 op2=-42 -> op1=-42
2520 */
2521 IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2522
2523 IEM_MC_BEGIN(0, 1, 0);
2524 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2525 IEM_MC_LOCAL(uint64_t, uSrc);
2526
2527 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2528 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
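 /* Copy the high qword of the source into the low qword of the
    destination; the destination's high qword is preserved. */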
2529 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 1 /* a_iQword*/);
2530 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2531
2532 IEM_MC_ADVANCE_RIP_AND_FINISH();
2533 IEM_MC_END();
2534 }
2535 else
2536 {
2537 /**
2538 * @opdone
2539 * @opcode 0x12
2540 * @opcodesub !11 mr/reg
2541 * @oppfx none
2542 * @opcpuid sse
2543 * @opgroup og_sse_simdfp_datamove
2544 * @opxcpttype 5
2545 * @optest op1=1 op2=2 -> op1=2
2546 * @optest op1=0 op2=-42 -> op1=-42
2547 * @opfunction iemOp_movlps_Vq_Mq__movhlps
2548 */
2549 IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2550
2551 IEM_MC_BEGIN(0, 2, 0);
2552 IEM_MC_LOCAL(uint64_t, uSrc);
2553 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2554
2555 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2556 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2557 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2558 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2559
2560 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2561 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2562
2563 IEM_MC_ADVANCE_RIP_AND_FINISH();
2564 IEM_MC_END();
2565 }
2566}
2567
2568
2569/**
2570 * @opcode 0x12
2571 * @opcodesub !11 mr/reg
2572 * @oppfx 0x66
2573 * @opcpuid sse2
2574 * @opgroup og_sse2_pcksclr_datamove
2575 * @opxcpttype 5
2576 * @optest op1=1 op2=2 -> op1=2
2577 * @optest op1=0 op2=-42 -> op1=-42
2578 */
2579FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
2580{
2581 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2582 if (IEM_IS_MODRM_MEM_MODE(bRm))
2583 {
2584 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2585
2586 IEM_MC_BEGIN(0, 2, 0);
2587 IEM_MC_LOCAL(uint64_t, uSrc);
2588 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2589
2590 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2591 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2592 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2593 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2594
2595 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2596 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2597
2598 IEM_MC_ADVANCE_RIP_AND_FINISH();
2599 IEM_MC_END();
2600 }
2601
2602 /**
2603 * @opdone
2604 * @opmnemonic ud660f12m3
2605 * @opcode 0x12
2606 * @opcodesub 11 mr/reg
2607 * @oppfx 0x66
2608 * @opunused immediate
2609 * @opcpuid sse
2610 * @optest ->
2611 */
2612 else
2613 IEMOP_RAISE_INVALID_OPCODE_RET();
2614}
2615
2616
2617/**
2618 * @opcode 0x12
2619 * @oppfx 0xf3
2620 * @opcpuid sse3
2621 * @opgroup og_sse3_pcksclr_datamove
2622 * @opxcpttype 4
2623 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
2624 * op1=0x00000002000000020000000100000001
2625 */
2626FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
2627{
2628 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2629 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2630 if (IEM_IS_MODRM_REG_MODE(bRm))
2631 {
2632 /*
2633 * XMM, XMM.
2634 */
2635 IEM_MC_BEGIN(0, 1, 0);
2636 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2637 IEM_MC_LOCAL(RTUINT128U, uSrc);
2638
2639 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2640 IEM_MC_PREPARE_SSE_USAGE();
2641
2642 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
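 /* Duplicate the even dwords: dst = { src0, src0, src2, src2 }. */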
2643 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
2644 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
2645 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
2646 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
2647
2648 IEM_MC_ADVANCE_RIP_AND_FINISH();
2649 IEM_MC_END();
2650 }
2651 else
2652 {
2653 /*
2654 * XMM, [mem128].
2655 */
2656 IEM_MC_BEGIN(0, 2, 0);
2657 IEM_MC_LOCAL(RTUINT128U, uSrc);
2658 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2659
2660 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2661 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2662 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2663 IEM_MC_PREPARE_SSE_USAGE();
2664
2665 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2666 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
2667 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
2668 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
2669 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
2670
2671 IEM_MC_ADVANCE_RIP_AND_FINISH();
2672 IEM_MC_END();
2673 }
2674}
2675
2676
2677/**
2678 * @opcode 0x12
2679 * @oppfx 0xf2
2680 * @opcpuid sse3
2681 * @opgroup og_sse3_pcksclr_datamove
2682 * @opxcpttype 5
2683 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
2684 * op1=0x22222222111111112222222211111111
2685 */
2686FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
2687{
2688 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2689 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2690 if (IEM_IS_MODRM_REG_MODE(bRm))
2691 {
2692 /*
2693 * XMM128, XMM64.
2694 */
2695 IEM_MC_BEGIN(1, 0, 0);
2696 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2697 IEM_MC_ARG(uint64_t, uSrc, 0);
2698
2699 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2700 IEM_MC_PREPARE_SSE_USAGE();
2701
2702 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
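 /* Broadcast the low qword of the source into both halves of the
    destination. */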
2703 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2704 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2705
2706 IEM_MC_ADVANCE_RIP_AND_FINISH();
2707 IEM_MC_END();
2708 }
2709 else
2710 {
2711 /*
2712 * XMM128, [mem64].
2713 */
2714 IEM_MC_BEGIN(1, 1, 0);
2715 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2716 IEM_MC_ARG(uint64_t, uSrc, 0);
2717
2718 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2719 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2720 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2721 IEM_MC_PREPARE_SSE_USAGE();
2722
2723 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2724 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2725 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2726
2727 IEM_MC_ADVANCE_RIP_AND_FINISH();
2728 IEM_MC_END();
2729 }
2730}
2731
2732
2733/**
2734 * @opcode 0x13
2735 * @opcodesub !11 mr/reg
2736 * @oppfx none
2737 * @opcpuid sse
2738 * @opgroup og_sse_simdfp_datamove
2739 * @opxcpttype 5
2740 * @optest op1=1 op2=2 -> op1=2
2741 * @optest op1=0 op2=-42 -> op1=-42
2742 */
2743FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
2744{
2745 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2746 if (IEM_IS_MODRM_MEM_MODE(bRm))
2747 {
2748 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2749
2750 IEM_MC_BEGIN(0, 2, 0);
2751 IEM_MC_LOCAL(uint64_t, uSrc);
2752 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2753
2754 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2755 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2756 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2757 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2758
2759 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2760 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2761
2762 IEM_MC_ADVANCE_RIP_AND_FINISH();
2763 IEM_MC_END();
2764 }
2765
2766 /**
2767 * @opdone
2768 * @opmnemonic ud0f13m3
2769 * @opcode 0x13
2770 * @opcodesub 11 mr/reg
2771 * @oppfx none
2772 * @opunused immediate
2773 * @opcpuid sse
2774 * @optest ->
2775 */
2776 else
2777 IEMOP_RAISE_INVALID_OPCODE_RET();
2778}
2779
2780
2781/**
2782 * @opcode 0x13
2783 * @opcodesub !11 mr/reg
2784 * @oppfx 0x66
2785 * @opcpuid sse2
2786 * @opgroup og_sse2_pcksclr_datamove
2787 * @opxcpttype 5
2788 * @optest op1=1 op2=2 -> op1=2
2789 * @optest op1=0 op2=-42 -> op1=-42
2790 */
2791FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
2792{
2793 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2794 if (IEM_IS_MODRM_MEM_MODE(bRm))
2795 {
2796 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2797
2798 IEM_MC_BEGIN(0, 2, 0);
2799 IEM_MC_LOCAL(uint64_t, uSrc);
2800 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2801
2802 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2803 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2804 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2805 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2806
2807 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2808 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2809
2810 IEM_MC_ADVANCE_RIP_AND_FINISH();
2811 IEM_MC_END();
2812 }
2813
2814 /**
2815 * @opdone
2816 * @opmnemonic ud660f13m3
2817 * @opcode 0x13
2818 * @opcodesub 11 mr/reg
2819 * @oppfx 0x66
2820 * @opunused immediate
2821 * @opcpuid sse
2822 * @optest ->
2823 */
2824 else
2825 IEMOP_RAISE_INVALID_OPCODE_RET();
2826}
2827
2828
2829/**
2830 * @opmnemonic udf30f13
2831 * @opcode 0x13
2832 * @oppfx 0xf3
2833 * @opunused intel-modrm
2834 * @opcpuid sse
2835 * @optest ->
2836 * @opdone
2837 */
2838
2839/**
2840 * @opmnemonic udf20f13
2841 * @opcode 0x13
2842 * @oppfx 0xf2
2843 * @opunused intel-modrm
2844 * @opcpuid sse
2845 * @optest ->
2846 * @opdone
2847 */
2848
2849/** Opcode 0x0f 0x14 - unpcklps Vx, Wx */
2850FNIEMOP_DEF(iemOp_unpcklps_Vx_Wx)
2851{
2852 IEMOP_MNEMONIC2(RM, UNPCKLPS, unpcklps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
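 /* Interleaves the low dwords: dst = { dst0, src0, dst1, src1 }. */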
2853 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, iemAImpl_unpcklps_u128);
2854}
2855
2856
2857/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
2858FNIEMOP_DEF(iemOp_unpcklpd_Vx_Wx)
2859{
2860 IEMOP_MNEMONIC2(RM, UNPCKLPD, unpcklpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2861 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_unpcklpd_u128);
2862}
2863
2864
2865/**
2866 * @opdone
2867 * @opmnemonic udf30f14
2868 * @opcode 0x14
2869 * @oppfx 0xf3
2870 * @opunused intel-modrm
2871 * @opcpuid sse
2872 * @optest ->
2873 * @opdone
2874 */
2875
2876/**
2877 * @opmnemonic udf20f14
2878 * @opcode 0x14
2879 * @oppfx 0xf2
2880 * @opunused intel-modrm
2881 * @opcpuid sse
2882 * @optest ->
2883 * @opdone
2884 */
2885
2886/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
2887FNIEMOP_DEF(iemOp_unpckhps_Vx_Wx)
2888{
2889 IEMOP_MNEMONIC2(RM, UNPCKHPS, unpckhps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
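 /* Interleaves the high dwords: dst = { dst2, src2, dst3, src3 }. */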
2890 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, iemAImpl_unpckhps_u128);
2891}
2892
2893
2894/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
2895FNIEMOP_DEF(iemOp_unpckhpd_Vx_Wx)
2896{
2897 IEMOP_MNEMONIC2(RM, UNPCKHPD, unpckhpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2898 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_unpckhpd_u128);
2899}
2900
2901
2902/* Opcode 0xf3 0x0f 0x15 - invalid */
2903/* Opcode 0xf2 0x0f 0x15 - invalid */
2904
2905/**
2906 * @opdone
2907 * @opmnemonic udf30f15
2908 * @opcode 0x15
2909 * @oppfx 0xf3
2910 * @opunused intel-modrm
2911 * @opcpuid sse
2912 * @optest ->
2913 * @opdone
2914 */
2915
2916/**
2917 * @opmnemonic udf20f15
2918 * @opcode 0x15
2919 * @oppfx 0xf2
2920 * @opunused intel-modrm
2921 * @opcpuid sse
2922 * @optest ->
2923 * @opdone
2924 */
2925
2926FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
2927{
2928 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2929 if (IEM_IS_MODRM_REG_MODE(bRm))
2930 {
2931 /**
2932 * @opcode 0x16
2933 * @opcodesub 11 mr/reg
2934 * @oppfx none
2935 * @opcpuid sse
2936 * @opgroup og_sse_simdfp_datamove
2937 * @opxcpttype 5
2938 * @optest op1=1 op2=2 -> op1=2
2939 * @optest op1=0 op2=-42 -> op1=-42
2940 */
2941 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2942
2943 IEM_MC_BEGIN(0, 1, 0);
2944 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2945 IEM_MC_LOCAL(uint64_t, uSrc);
2946
2947 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2948 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
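 /* Copy the low qword of the source into the high qword of the
    destination; the destination's low qword is preserved. */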
2949 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2950 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2951
2952 IEM_MC_ADVANCE_RIP_AND_FINISH();
2953 IEM_MC_END();
2954 }
2955 else
2956 {
2957 /**
2958 * @opdone
2959 * @opcode 0x16
2960 * @opcodesub !11 mr/reg
2961 * @oppfx none
2962 * @opcpuid sse
2963 * @opgroup og_sse_simdfp_datamove
2964 * @opxcpttype 5
2965 * @optest op1=1 op2=2 -> op1=2
2966 * @optest op1=0 op2=-42 -> op1=-42
2967 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
2968 */
2969 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2970
2971 IEM_MC_BEGIN(0, 2, 0);
2972 IEM_MC_LOCAL(uint64_t, uSrc);
2973 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2974
2975 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2976 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2977 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2978 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2979
2980 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2981 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2982
2983 IEM_MC_ADVANCE_RIP_AND_FINISH();
2984 IEM_MC_END();
2985 }
2986}
2987
2988
2989/**
2990 * @opcode 0x16
2991 * @opcodesub !11 mr/reg
2992 * @oppfx 0x66
2993 * @opcpuid sse2
2994 * @opgroup og_sse2_pcksclr_datamove
2995 * @opxcpttype 5
2996 * @optest op1=1 op2=2 -> op1=2
2997 * @optest op1=0 op2=-42 -> op1=-42
2998 */
2999FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
3000{
3001 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3002 if (IEM_IS_MODRM_MEM_MODE(bRm))
3003 {
3004 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3005
3006 IEM_MC_BEGIN(0, 2, 0);
3007 IEM_MC_LOCAL(uint64_t, uSrc);
3008 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3009
3010 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3011 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3012 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3013 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3014
3015 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3016 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3017
3018 IEM_MC_ADVANCE_RIP_AND_FINISH();
3019 IEM_MC_END();
3020 }
3021
3022 /**
3023 * @opdone
3024 * @opmnemonic ud660f16m3
3025 * @opcode 0x16
3026 * @opcodesub 11 mr/reg
3027 * @oppfx 0x66
3028 * @opunused immediate
3029 * @opcpuid sse
3030 * @optest ->
3031 */
3032 else
3033 IEMOP_RAISE_INVALID_OPCODE_RET();
3034}
3035
3036
3037/**
3038 * @opcode 0x16
3039 * @oppfx 0xf3
3040 * @opcpuid sse3
3041 * @opgroup og_sse3_pcksclr_datamove
3042 * @opxcpttype 4
3043 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
3044 * op1=0x00000002000000020000000100000001
3045 */
3046FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
3047{
3048 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3049 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3050 if (IEM_IS_MODRM_REG_MODE(bRm))
3051 {
3052 /*
3053 * XMM128, XMM128.
3054 */
3055 IEM_MC_BEGIN(0, 1, 0);
3056 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
3057 IEM_MC_LOCAL(RTUINT128U, uSrc);
3058
3059 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3060 IEM_MC_PREPARE_SSE_USAGE();
3061
3062 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
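 /* Duplicate the odd dwords: dst = { src1, src1, src3, src3 }. */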
3063 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
3064 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
3065 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
3066 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
3067
3068 IEM_MC_ADVANCE_RIP_AND_FINISH();
3069 IEM_MC_END();
3070 }
3071 else
3072 {
3073 /*
3074 * XMM128, [mem128].
3075 */
3076 IEM_MC_BEGIN(0, 2, 0);
3077 IEM_MC_LOCAL(RTUINT128U, uSrc);
3078 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3079
3080 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3081 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
3082 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3083 IEM_MC_PREPARE_SSE_USAGE();
3084
3085 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3086 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
3087 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
3088 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
3089 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
3090
3091 IEM_MC_ADVANCE_RIP_AND_FINISH();
3092 IEM_MC_END();
3093 }
3094}
3095
3096/**
3097 * @opdone
3098 * @opmnemonic udf20f16
3099 * @opcode 0x16
3100 * @oppfx 0xf2
3101 * @opunused intel-modrm
3102 * @opcpuid sse
3103 * @optest ->
3104 * @opdone
3105 */
3106
3107
3108/**
3109 * @opcode 0x17
3110 * @opcodesub !11 mr/reg
3111 * @oppfx none
3112 * @opcpuid sse
3113 * @opgroup og_sse_simdfp_datamove
3114 * @opxcpttype 5
3115 * @optest op1=1 op2=2 -> op1=2
3116 * @optest op1=0 op2=-42 -> op1=-42
3117 */
3118FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
3119{
3120 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3121 if (IEM_IS_MODRM_MEM_MODE(bRm))
3122 {
3123 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3124
3125 IEM_MC_BEGIN(0, 2, 0);
3126 IEM_MC_LOCAL(uint64_t, uSrc);
3127 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3128
3129 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3130 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3131 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3132 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3133
3134 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
3135 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3136
3137 IEM_MC_ADVANCE_RIP_AND_FINISH();
3138 IEM_MC_END();
3139 }
3140
3141 /**
3142 * @opdone
3143 * @opmnemonic ud0f17m3
3144 * @opcode 0x17
3145 * @opcodesub 11 mr/reg
3146 * @oppfx none
3147 * @opunused immediate
3148 * @opcpuid sse
3149 * @optest ->
3150 */
3151 else
3152 IEMOP_RAISE_INVALID_OPCODE_RET();
3153}
3154
3155
3156/**
3157 * @opcode 0x17
3158 * @opcodesub !11 mr/reg
3159 * @oppfx 0x66
3160 * @opcpuid sse2
3161 * @opgroup og_sse2_pcksclr_datamove
3162 * @opxcpttype 5
3163 * @optest op1=1 op2=2 -> op1=2
3164 * @optest op1=0 op2=-42 -> op1=-42
3165 */
3166FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
3167{
3168 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3169 if (IEM_IS_MODRM_MEM_MODE(bRm))
3170 {
3171 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3172
3173 IEM_MC_BEGIN(0, 2, 0);
3174 IEM_MC_LOCAL(uint64_t, uSrc);
3175 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3176
3177 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3178 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3179 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3180 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3181
3182 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
3183 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3184
3185 IEM_MC_ADVANCE_RIP_AND_FINISH();
3186 IEM_MC_END();
3187 }
3188
3189 /**
3190 * @opdone
3191 * @opmnemonic ud660f17m3
3192 * @opcode 0x17
3193 * @opcodesub 11 mr/reg
3194 * @oppfx 0x66
3195 * @opunused immediate
3196 * @opcpuid sse
3197 * @optest ->
3198 */
3199 else
3200 IEMOP_RAISE_INVALID_OPCODE_RET();
3201}
3202
3203
3204/**
3205 * @opdone
3206 * @opmnemonic udf30f17
3207 * @opcode 0x17
3208 * @oppfx 0xf3
3209 * @opunused intel-modrm
3210 * @opcpuid sse
3211 * @optest ->
3212 * @opdone
3213 */
3214
3215/**
3216 * @opmnemonic udf20f17
3217 * @opcode 0x17
3218 * @oppfx 0xf2
3219 * @opunused intel-modrm
3220 * @opcpuid sse
3221 * @optest ->
3222 * @opdone
3223 */
3224
3225
3226/** Opcode 0x0f 0x18. */
3227FNIEMOP_DEF(iemOp_prefetch_Grp16)
3228{
3229 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3230 if (IEM_IS_MODRM_MEM_MODE(bRm))
3231 {
3232 switch (IEM_GET_MODRM_REG_8(bRm))
3233 {
3234 case 4: /* Aliased to /0 for the time being according to AMD. */
3235 case 5: /* Aliased to /0 for the time being according to AMD. */
3236 case 6: /* Aliased to /0 for the time being according to AMD. */
3237 case 7: /* Aliased to /0 for the time being according to AMD. */
3238 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
3239 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
3240 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
3241 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
3242 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3243 }
3244
3245 IEM_MC_BEGIN(0, 1, 0);
3246 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3247 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3248 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3249 /* Currently a NOP. */
3250 NOREF(GCPtrEffSrc);
3251 IEM_MC_ADVANCE_RIP_AND_FINISH();
3252 IEM_MC_END();
3253 }
3254 else
3255 IEMOP_RAISE_INVALID_OPCODE_RET();
3256}
3257
3258
3259/** Opcode 0x0f 0x19..0x1f. */
3260FNIEMOP_DEF(iemOp_nop_Ev)
3261{
3262 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
3263 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3264 if (IEM_IS_MODRM_REG_MODE(bRm))
3265 {
3266 IEM_MC_BEGIN(0, 0, 0);
3267 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3268 IEM_MC_ADVANCE_RIP_AND_FINISH();
3269 IEM_MC_END();
3270 }
3271 else
3272 {
3273 IEM_MC_BEGIN(0, 1, 0);
3274 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3275 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3276 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3277 /* Currently a NOP. */
3278 NOREF(GCPtrEffSrc);
3279 IEM_MC_ADVANCE_RIP_AND_FINISH();
3280 IEM_MC_END();
3281 }
3282}
3283
3284
3285/** Opcode 0x0f 0x20. */
3286FNIEMOP_DEF(iemOp_mov_Rd_Cd)
3287{
3288 /* mod is ignored, as are operand size overrides. */
3289 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
3290 IEMOP_HLP_MIN_386();
3291 if (IEM_IS_64BIT_CODE(pVCpu))
3292 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3293 else
3294 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3295
3296 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3297 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3298 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3299 {
3300 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3301 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3302 IEMOP_RAISE_INVALID_OPCODE_RET(); /* #UD takes precedence over #GP(), see test. */
3303 iCrReg |= 8;
3304 }
3305 switch (iCrReg)
3306 {
3307 case 0: case 2: case 3: case 4: case 8:
3308 break;
3309 default:
3310 IEMOP_RAISE_INVALID_OPCODE_RET();
3311 }
3312 IEMOP_HLP_DONE_DECODING();
3313
3314 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_mov_Rd_Cd, IEM_GET_MODRM_RM(pVCpu, bRm), iCrReg);
3315}
3316
3317
3318/** Opcode 0x0f 0x21. */
3319FNIEMOP_DEF(iemOp_mov_Rd_Dd)
3320{
3321 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
3322 IEMOP_HLP_MIN_386();
3323 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3324 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3325 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3326 IEMOP_RAISE_INVALID_OPCODE_RET();
3327 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_mov_Rd_Dd, IEM_GET_MODRM_RM(pVCpu, bRm), IEM_GET_MODRM_REG_8(bRm));
3328}
3329
3330
3331/** Opcode 0x0f 0x22. */
3332FNIEMOP_DEF(iemOp_mov_Cd_Rd)
3333{
3334 /* mod is ignored, as are operand size overrides. */
3335 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
3336 IEMOP_HLP_MIN_386();
3337 if (IEM_IS_64BIT_CODE(pVCpu))
3338 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3339 else
3340 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3341
3342 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3343 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3344 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3345 {
3346 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3347 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3348 IEMOP_RAISE_INVALID_OPCODE_RET(); /* #UD takes precedence over #GP(), see test. */
3349 iCrReg |= 8;
3350 }
3351 switch (iCrReg)
3352 {
3353 case 0: case 2: case 3: case 4: case 8:
3354 break;
3355 default:
3356 IEMOP_RAISE_INVALID_OPCODE_RET();
3357 }
3358 IEMOP_HLP_DONE_DECODING();
3359
3360 if (iCrReg & (2 | 8))
3361 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT,
3362 iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
3363 else
3364 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT,
3365 iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
3366}
3367
3368
3369/** Opcode 0x0f 0x23. */
3370FNIEMOP_DEF(iemOp_mov_Dd_Rd)
3371{
3372 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
3373 IEMOP_HLP_MIN_386();
3374 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3375 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3376 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3377 IEMOP_RAISE_INVALID_OPCODE_RET();
3378 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_mov_Dd_Rd, IEM_GET_MODRM_REG_8(bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
3379}
3380
3381
3382/** Opcode 0x0f 0x24. */
3383FNIEMOP_DEF(iemOp_mov_Rd_Td)
3384{
3385 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
3386 IEMOP_HLP_MIN_386();
3387 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3388 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3389 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3390 IEMOP_RAISE_INVALID_OPCODE_RET();
3391 IEM_MC_DEFER_TO_CIMPL_2_RET(0, iemCImpl_mov_Rd_Td, IEM_GET_MODRM_RM(pVCpu, bRm), IEM_GET_MODRM_REG_8(bRm));
3392}
3393
3394
3395/** Opcode 0x0f 0x26. */
3396FNIEMOP_DEF(iemOp_mov_Td_Rd)
3397{
3398 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
3399 IEMOP_HLP_MIN_386();
3400 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3401 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3402 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3403 IEMOP_RAISE_INVALID_OPCODE_RET();
3404 IEM_MC_DEFER_TO_CIMPL_2_RET(0, iemCImpl_mov_Td_Rd, IEM_GET_MODRM_REG_8(bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
3405}
3406
3407
3408/**
3409 * @opcode 0x28
3410 * @oppfx none
3411 * @opcpuid sse
3412 * @opgroup og_sse_simdfp_datamove
3413 * @opxcpttype 1
3414 * @optest op1=1 op2=2 -> op1=2
3415 * @optest op1=0 op2=-42 -> op1=-42
3416 */
3417FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
3418{
3419 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
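 /* Aligned variant: the ALIGN_SSE memory fetch below raises #GP if the
    operand isn't 16-byte aligned. */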
3420 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3421 if (IEM_IS_MODRM_REG_MODE(bRm))
3422 {
3423 /*
3424 * Register, register.
3425 */
3426 IEM_MC_BEGIN(0, 0, 0);
3427 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3428 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3429 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3430 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3431 IEM_GET_MODRM_RM(pVCpu, bRm));
3432 IEM_MC_ADVANCE_RIP_AND_FINISH();
3433 IEM_MC_END();
3434 }
3435 else
3436 {
3437 /*
3438 * Register, memory.
3439 */
3440 IEM_MC_BEGIN(0, 2, 0);
3441 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3442 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3443
3444 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3445 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3446 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3447 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3448
3449 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3450 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3451
3452 IEM_MC_ADVANCE_RIP_AND_FINISH();
3453 IEM_MC_END();
3454 }
3455}
3456
3457/**
3458 * @opcode 0x28
3459 * @oppfx 66
3460 * @opcpuid sse2
3461 * @opgroup og_sse2_pcksclr_datamove
3462 * @opxcpttype 1
3463 * @optest op1=1 op2=2 -> op1=2
3464 * @optest op1=0 op2=-42 -> op1=-42
3465 */
3466FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
3467{
3468 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3469 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3470 if (IEM_IS_MODRM_REG_MODE(bRm))
3471 {
3472 /*
3473 * Register, register.
3474 */
3475 IEM_MC_BEGIN(0, 0, 0);
3476 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3477 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3478 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3479 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3480 IEM_GET_MODRM_RM(pVCpu, bRm));
3481 IEM_MC_ADVANCE_RIP_AND_FINISH();
3482 IEM_MC_END();
3483 }
3484 else
3485 {
3486 /*
3487 * Register, memory.
3488 */
3489 IEM_MC_BEGIN(0, 2, 0);
3490 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3491 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3492
3493 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3494 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3495 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3496 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3497
3498 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3499 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3500
3501 IEM_MC_ADVANCE_RIP_AND_FINISH();
3502 IEM_MC_END();
3503 }
3504}
3505
3506/* Opcode 0xf3 0x0f 0x28 - invalid */
3507/* Opcode 0xf2 0x0f 0x28 - invalid */
3508
3509/**
3510 * @opcode 0x29
3511 * @oppfx none
3512 * @opcpuid sse
3513 * @opgroup og_sse_simdfp_datamove
3514 * @opxcpttype 1
3515 * @optest op1=1 op2=2 -> op1=2
3516 * @optest op1=0 op2=-42 -> op1=-42
3517 */
3518FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
3519{
3520 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3521 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3522 if (IEM_IS_MODRM_REG_MODE(bRm))
3523 {
3524 /*
3525 * Register, register.
3526 */
3527 IEM_MC_BEGIN(0, 0, 0);
3528 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3529 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3530 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3531 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3532 IEM_GET_MODRM_REG(pVCpu, bRm));
3533 IEM_MC_ADVANCE_RIP_AND_FINISH();
3534 IEM_MC_END();
3535 }
3536 else
3537 {
3538 /*
3539 * Memory, register.
3540 */
3541 IEM_MC_BEGIN(0, 2, 0);
3542 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3543 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3544
3545 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3546 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3547 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3548 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3549
3550 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3551 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3552
3553 IEM_MC_ADVANCE_RIP_AND_FINISH();
3554 IEM_MC_END();
3555 }
3556}
3557
3558/**
3559 * @opcode 0x29
3560 * @oppfx 66
3561 * @opcpuid sse2
3562 * @opgroup og_sse2_pcksclr_datamove
3563 * @opxcpttype 1
3564 * @optest op1=1 op2=2 -> op1=2
3565 * @optest op1=0 op2=-42 -> op1=-42
3566 */
3567FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
3568{
3569 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3570 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3571 if (IEM_IS_MODRM_REG_MODE(bRm))
3572 {
3573 /*
3574 * Register, register.
3575 */
3576 IEM_MC_BEGIN(0, 0, 0);
3577 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3578 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3579 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3580 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3581 IEM_GET_MODRM_REG(pVCpu, bRm));
3582 IEM_MC_ADVANCE_RIP_AND_FINISH();
3583 IEM_MC_END();
3584 }
3585 else
3586 {
3587 /*
3588 * Memory, register.
3589 */
3590 IEM_MC_BEGIN(0, 2, 0);
3591 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3592 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3593
3594 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3595 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3596 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3597 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3598
3599 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3600 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3601
3602 IEM_MC_ADVANCE_RIP_AND_FINISH();
3603 IEM_MC_END();
3604 }
3605}
3606
3607/* Opcode 0xf3 0x0f 0x29 - invalid */
3608/* Opcode 0xf2 0x0f 0x29 - invalid */
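
/*
 * Note: 0x0f 0x28 and 0x0f 0x29 are the same register move with the
 * direction flipped: 0x28 is Vps,Wps (ModR/M reg is the destination) while
 * 0x29 is Wps,Vps (ModR/M r/m is the destination).  For the register,
 * register form both encodings can express the same move, e.g.:
 * @code
 *    0f 28 ca    movaps xmm1, xmm2   ; reg=xmm1 (dst), r/m=xmm2 (src)
 *    0f 29 d1    movaps xmm1, xmm2   ; reg=xmm2 (src), r/m=xmm1 (dst)
 * @endcode
 * Assemblers normally emit the 0x28 form, but both must decode to the same
 * operation.
 */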
3609
3610
3611/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
3612FNIEMOP_DEF(iemOp_cvtpi2ps_Vps_Qpi)
3613{
3614 IEMOP_MNEMONIC2(RM, CVTPI2PS, cvtpi2ps, Vps, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3615 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3616 if (IEM_IS_MODRM_REG_MODE(bRm))
3617 {
3618 /*
3619 * XMM, MMX
3620 */
3621 IEM_MC_BEGIN(3, 1, 0);
3622 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3623 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3624 IEM_MC_LOCAL(X86XMMREG, Dst);
3625 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3626 IEM_MC_ARG(uint64_t, u64Src, 2);
3627 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3628 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3629 IEM_MC_PREPARE_FPU_USAGE();
3630 IEM_MC_FPU_TO_MMX_MODE();
3631
3632 IEM_MC_REF_MXCSR(pfMxcsr);
3633 IEM_MC_FETCH_XREG_XMM(Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* Need it because the high quadword remains unchanged. */
3634 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
3635
3636 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2ps_u128, pfMxcsr, pDst, u64Src);
3637 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3638 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3639 } IEM_MC_ELSE() {
3640 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3641 } IEM_MC_ENDIF();
3642
3643 IEM_MC_ADVANCE_RIP_AND_FINISH();
3644 IEM_MC_END();
3645 }
3646 else
3647 {
3648 /*
3649 * XMM, [mem64]
3650 */
3651 IEM_MC_BEGIN(3, 2, 0);
3652 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3653 IEM_MC_LOCAL(X86XMMREG, Dst);
3654 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3655 IEM_MC_ARG(uint64_t, u64Src, 2);
3656 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3657
3658 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3659 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3660 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3661 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3662 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3663
3664 IEM_MC_PREPARE_FPU_USAGE();
3665 IEM_MC_FPU_TO_MMX_MODE();
3666 IEM_MC_REF_MXCSR(pfMxcsr);
3667
3668 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2ps_u128, pfMxcsr, pDst, u64Src);
3669 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3670 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3671 } IEM_MC_ELSE() {
3672 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3673 } IEM_MC_ENDIF();
3674
3675 IEM_MC_ADVANCE_RIP_AND_FINISH();
3676 IEM_MC_END();
3677 }
3678}
3679
3680
3681/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
3682FNIEMOP_DEF(iemOp_cvtpi2pd_Vpd_Qpi)
3683{
3684 IEMOP_MNEMONIC2(RM, CVTPI2PD, cvtpi2pd, Vpd, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3685 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3686 if (IEM_IS_MODRM_REG_MODE(bRm))
3687 {
3688 /*
3689 * XMM, MMX
3690 */
3691 IEM_MC_BEGIN(3, 1, 0);
3692 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3693 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3694 IEM_MC_LOCAL(X86XMMREG, Dst);
3695 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3696 IEM_MC_ARG(uint64_t, u64Src, 2);
3697 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3698 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3699 IEM_MC_PREPARE_FPU_USAGE();
3700 IEM_MC_FPU_TO_MMX_MODE();
3701
3702 IEM_MC_REF_MXCSR(pfMxcsr);
3703 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
3704
3705 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2pd_u128, pfMxcsr, pDst, u64Src);
3706 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3707 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3708 } IEM_MC_ELSE() {
3709 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3710 } IEM_MC_ENDIF();
3711
3712 IEM_MC_ADVANCE_RIP_AND_FINISH();
3713 IEM_MC_END();
3714 }
3715 else
3716 {
3717 /*
3718 * XMM, [mem64]
3719 */
3720 IEM_MC_BEGIN(3, 3, 0);
3721 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3722 IEM_MC_LOCAL(X86XMMREG, Dst);
3723 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3724 IEM_MC_ARG(uint64_t, u64Src, 2);
3725 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3726
3727 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3728 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3729 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3730 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3731 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3732
3733 /* Doesn't cause a transition to MMX mode. */
3734 IEM_MC_PREPARE_SSE_USAGE();
3735 IEM_MC_REF_MXCSR(pfMxcsr);
3736
3737 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2pd_u128, pfMxcsr, pDst, u64Src);
3738 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3739 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3740 } IEM_MC_ELSE() {
3741 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3742 } IEM_MC_ENDIF();
3743
3744 IEM_MC_ADVANCE_RIP_AND_FINISH();
3745 IEM_MC_END();
3746 }
3747}
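
/*
 * Note the asymmetry above: cvtpi2ps enters MMX mode (IEM_MC_FPU_TO_MMX_MODE)
 * for both the register and the memory form, while cvtpi2pd only does so when
 * the source really is an MMX register; with a memory source the MMX/x87
 * state is left untouched.  Compiler intrinsics mirror the MMX coupling, so
 * code mixing these with x87 must issue EMMS afterwards.  An illustrative
 * sketch (helper name is made up):
 * @code
 *    #include <xmmintrin.h>      // SSE:  _mm_cvtpi32_ps
 *
 *    static __m128 TwoIntsToFloats(__m128 uPassThru, __m64 uSrc)
 *    {
 *        __m128 uRes = _mm_cvtpi32_ps(uPassThru, uSrc); // cvtpi2ps
 *        _mm_empty();                                   // emms - leave MMX mode
 *        return uRes;
 *    }
 * @endcode
 */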
3748
3749
3750/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
3751FNIEMOP_DEF(iemOp_cvtsi2ss_Vss_Ey)
3752{
3753 IEMOP_MNEMONIC2(RM, CVTSI2SS, cvtsi2ss, Vss, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
3754
3755 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3756 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3757 {
3758 if (IEM_IS_MODRM_REG_MODE(bRm))
3759 {
3760 /* XMM, greg64 */
3761 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT);
3762 IEM_MC_LOCAL(uint32_t, fMxcsr);
3763 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3764 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3765 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3766 IEM_MC_ARG(const int64_t *, pi64Src, 2);
3767
3768 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3769 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3770 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3771
3772 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3773 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i64, pfMxcsr, pr32Dst, pi64Src);
3774 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3775 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3776 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3777 } IEM_MC_ELSE() {
3778 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3779 } IEM_MC_ENDIF();
3780
3781 IEM_MC_ADVANCE_RIP_AND_FINISH();
3782 IEM_MC_END();
3783 }
3784 else
3785 {
3786 /* XMM, [mem64] */
3787 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT);
3788 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3789 IEM_MC_LOCAL(uint32_t, fMxcsr);
3790 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3791 IEM_MC_LOCAL(int64_t, i64Src);
3792 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3793 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3794 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 2);
3795
3796 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3797 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3798 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3799 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3800
3801 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3802 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i64, pfMxcsr, pr32Dst, pi64Src);
3803 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3804 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3805 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3806 } IEM_MC_ELSE() {
3807 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3808 } IEM_MC_ENDIF();
3809
3810 IEM_MC_ADVANCE_RIP_AND_FINISH();
3811 IEM_MC_END();
3812 }
3813 }
3814 else
3815 {
3816 if (IEM_IS_MODRM_REG_MODE(bRm))
3817 {
3818 /* XMM, greg32 */
3819 IEM_MC_BEGIN(3, 2, 0);
3820 IEM_MC_LOCAL(uint32_t, fMxcsr);
3821 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3822 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3823 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3824 IEM_MC_ARG(const int32_t *, pi32Src, 2);
3825
3826 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3827 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3828 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3829
3830 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3831 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i32, pfMxcsr, pr32Dst, pi32Src);
3832 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3833 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3834 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3835 } IEM_MC_ELSE() {
3836 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3837 } IEM_MC_ENDIF();
3838
3839 IEM_MC_ADVANCE_RIP_AND_FINISH();
3840 IEM_MC_END();
3841 }
3842 else
3843 {
3844 /* XMM, [mem32] */
3845 IEM_MC_BEGIN(3, 4, 0);
3846 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3847 IEM_MC_LOCAL(uint32_t, fMxcsr);
3848 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3849 IEM_MC_LOCAL(int32_t, i32Src);
3850 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3851 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3852 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 2);
3853
3854 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3855 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3856 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3857 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3858
3859 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3860 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i32, pfMxcsr, pr32Dst, pi32Src);
3861 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3862 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3863 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3864 } IEM_MC_ELSE() {
3865 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3866 } IEM_MC_ENDIF();
3867
3868 IEM_MC_ADVANCE_RIP_AND_FINISH();
3869 IEM_MC_END();
3870 }
3871 }
3872}
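
/*
 * Note: REX.W selects the integer source width above; the result is always a
 * single-precision value in the low dword of the destination while the upper
 * three lanes pass through unchanged (hence IEM_MC_STORE_XREG_R32).  An
 * illustrative intrinsics view of the two variants:
 * @code
 *    #include <xmmintrin.h>
 *    __m128 FromI32(__m128 a, int i)       { return _mm_cvtsi32_ss(a, i); } // cvtsi2ss xmm, r/m32
 *  #if defined(__x86_64__) || defined(_M_X64)
 *    __m128 FromI64(__m128 a, long long i) { return _mm_cvtsi64_ss(a, i); } // cvtsi2ss xmm, r/m64 (REX.W)
 *  #endif
 * @endcode
 */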
3873
3874
3875/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
3876FNIEMOP_DEF(iemOp_cvtsi2sd_Vsd_Ey)
3877{
3878 IEMOP_MNEMONIC2(RM, CVTSI2SD, cvtsi2sd, Vsd, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
3879
3880 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3881 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3882 {
3883 if (IEM_IS_MODRM_REG_MODE(bRm))
3884 {
3885 /* XMM, greg64 */
3886 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT);
3887 IEM_MC_LOCAL(uint32_t, fMxcsr);
3888 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3889 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3890 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3891 IEM_MC_ARG(const int64_t *, pi64Src, 2);
3892
3893 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3894 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3895 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3896
3897 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3898 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i64, pfMxcsr, pr64Dst, pi64Src);
3899 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3900 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3901 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3902 } IEM_MC_ELSE() {
3903 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3904 } IEM_MC_ENDIF();
3905
3906 IEM_MC_ADVANCE_RIP_AND_FINISH();
3907 IEM_MC_END();
3908 }
3909 else
3910 {
3911 /* XMM, [mem64] */
3912 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT);
3913 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3914 IEM_MC_LOCAL(uint32_t, fMxcsr);
3915 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3916 IEM_MC_LOCAL(int64_t, i64Src);
3917 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3918 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3919 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 2);
3920
3921 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3922 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3923 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3924 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3925
3926 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3927 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i64, pfMxcsr, pr64Dst, pi64Src);
3928 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3929 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3930 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3931 } IEM_MC_ELSE() {
3932 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3933 } IEM_MC_ENDIF();
3934
3935 IEM_MC_ADVANCE_RIP_AND_FINISH();
3936 IEM_MC_END();
3937 }
3938 }
3939 else
3940 {
3941 if (IEM_IS_MODRM_REG_MODE(bRm))
3942 {
3943 /* XMM, greg32 */
3944 IEM_MC_BEGIN(3, 2, 0);
3945 IEM_MC_LOCAL(uint32_t, fMxcsr);
3946 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3947 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3948 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3949 IEM_MC_ARG(const int32_t *, pi32Src, 2);
3950
3951 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3952 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3953 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3954
3955 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3956 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i32, pfMxcsr, pr64Dst, pi32Src);
3957 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3958 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3959 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3960 } IEM_MC_ELSE() {
3961 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3962 } IEM_MC_ENDIF();
3963
3964 IEM_MC_ADVANCE_RIP_AND_FINISH();
3965 IEM_MC_END();
3966 }
3967 else
3968 {
3969 /* XMM, [mem32] */
3970 IEM_MC_BEGIN(3, 4, 0);
3971 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3972 IEM_MC_LOCAL(uint32_t, fMxcsr);
3973 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3974 IEM_MC_LOCAL(int32_t, i32Src);
3975 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3976 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3977 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 2);
3978
3979 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3980 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3981 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3982 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3983
3984 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3985 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i32, pfMxcsr, pr64Dst, pi32Src);
3986 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3987 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3988 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3989 } IEM_MC_ELSE() {
3990 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3991 } IEM_MC_ENDIF();
3992
3993 IEM_MC_ADVANCE_RIP_AND_FINISH();
3994 IEM_MC_END();
3995 }
3996 }
3997}
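
/*
 * Note: cvtsi2sd from a 32-bit source is always exact (every int32 is
 * representable as a double), so only the REX.W variant can raise a precision
 * exception; cvtsi2ss can be inexact from either width.  A tiny illustration
 * of the rounding that makes the MXCSR plumbing above necessary:
 * @code
 *    #include <emmintrin.h>
 *    static __m128d Inexact64(void) // 64-bit targets only
 *    {
 *        // 2^53 + 1 has no exact double representation -> #P (inexact) if unmasked.
 *        return _mm_cvtsi64_sd(_mm_setzero_pd(), (1LL << 53) + 1);
 *    }
 * @endcode
 */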
3998
3999
4000/**
4001 * @opcode 0x2b
4002 * @opcodesub !11 mr/reg
4003 * @oppfx none
4004 * @opcpuid sse
4005 * @opgroup og_sse1_cachect
4006 * @opxcpttype 1
4007 * @optest op1=1 op2=2 -> op1=2
4008 * @optest op1=0 op2=-42 -> op1=-42
4009 */
4010FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
4011{
4012 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4013 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4014 if (IEM_IS_MODRM_MEM_MODE(bRm))
4015 {
4016 /*
4017 * memory, register.
4018 */
4019 IEM_MC_BEGIN(0, 2, 0);
4020 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
4021 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4022
4023 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4024 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4025 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4026 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4027
4028 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
4029 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4030
4031 IEM_MC_ADVANCE_RIP_AND_FINISH();
4032 IEM_MC_END();
4033 }
4034 /* The register, register encoding is invalid. */
4035 else
4036 IEMOP_RAISE_INVALID_OPCODE_RET();
4037}
4038
4039/**
4040 * @opcode 0x2b
4041 * @opcodesub !11 mr/reg
4042 * @oppfx 0x66
4043 * @opcpuid sse2
4044 * @opgroup og_sse2_cachect
4045 * @opxcpttype 1
4046 * @optest op1=1 op2=2 -> op1=2
4047 * @optest op1=0 op2=-42 -> op1=-42
4048 */
4049FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
4050{
4051 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4052 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4053 if (IEM_IS_MODRM_MEM_MODE(bRm))
4054 {
4055 /*
4056 * memory, register.
4057 */
4058 IEM_MC_BEGIN(0, 2, 0);
4059 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
4060 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4061
4062 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4063 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4064 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4065 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4066
4067 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
4068 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4069
4070 IEM_MC_ADVANCE_RIP_AND_FINISH();
4071 IEM_MC_END();
4072 }
4073 /* The register, register encoding is invalid. */
4074 else
4075 IEMOP_RAISE_INVALID_OPCODE_RET();
4076}
4077/* Opcode 0xf3 0x0f 0x2b - invalid */
4078/* Opcode 0xf2 0x0f 0x2b - invalid */
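
/*
 * Note: movntps/movntpd only exist with a memory destination, hence the
 * IEMOP_RAISE_INVALID_OPCODE_RET for the register form above; the 16-byte
 * alignment requirement still applies even though the store is non-temporal.
 * Illustrative use via intrinsics (helper name is made up):
 * @code
 *    #include <xmmintrin.h>
 *    static void StreamOut(float *pfDst16, __m128 uVec) // pfDst16: 16-byte aligned
 *    {
 *        _mm_stream_ps(pfDst16, uVec);   // movntps [pfDst16], xmm - bypasses the caches
 *        _mm_sfence();                   // order the WC store vs. later stores
 *    }
 * @endcode
 */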
4079
4080
4081/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
4082FNIEMOP_DEF(iemOp_cvttps2pi_Ppi_Wps)
4083{
4084 IEMOP_MNEMONIC2(RM, CVTTPS2PI, cvttps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4085 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4086 if (IEM_IS_MODRM_REG_MODE(bRm))
4087 {
4088 /*
4089 * Register, register.
4090 */
4091 IEM_MC_BEGIN(3, 1, 0);
4092 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4093 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4094 IEM_MC_LOCAL(uint64_t, u64Dst);
4095 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4096 IEM_MC_ARG(uint64_t, u64Src, 2);
4097 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4098 IEM_MC_PREPARE_FPU_USAGE();
4099 IEM_MC_FPU_TO_MMX_MODE();
4100
4101 IEM_MC_REF_MXCSR(pfMxcsr);
4102 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
4103
4104 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4105 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4106 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4107 } IEM_MC_ELSE() {
4108 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4109 } IEM_MC_ENDIF();
4110
4111 IEM_MC_ADVANCE_RIP_AND_FINISH();
4112 IEM_MC_END();
4113 }
4114 else
4115 {
4116 /*
4117 * Register, memory.
4118 */
4119 IEM_MC_BEGIN(3, 2, 0);
4120 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4121 IEM_MC_LOCAL(uint64_t, u64Dst);
4122 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4123 IEM_MC_ARG(uint64_t, u64Src, 2);
4124 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4125
4126 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4127 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4128 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4129 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4130
4131 IEM_MC_PREPARE_FPU_USAGE();
4132 IEM_MC_FPU_TO_MMX_MODE();
4133 IEM_MC_REF_MXCSR(pfMxcsr);
4134
4135 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4136 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4137 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4138 } IEM_MC_ELSE() {
4139 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4140 } IEM_MC_ENDIF();
4141
4142 IEM_MC_ADVANCE_RIP_AND_FINISH();
4143 IEM_MC_END();
4144 }
4145}
4146
4147
4148/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
4149FNIEMOP_DEF(iemOp_cvttpd2pi_Ppi_Wpd)
4150{
4151 IEMOP_MNEMONIC2(RM, CVTTPD2PI, cvttpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4152 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4153 if (IEM_IS_MODRM_REG_MODE(bRm))
4154 {
4155 /*
4156 * Register, register.
4157 */
4158 IEM_MC_BEGIN(3, 1, 0);
4159 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4160 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4161 IEM_MC_LOCAL(uint64_t, u64Dst);
4162 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4163 IEM_MC_ARG(PCX86XMMREG, pSrc, 2);
4164 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4165 IEM_MC_PREPARE_FPU_USAGE();
4166 IEM_MC_FPU_TO_MMX_MODE();
4167
4168 IEM_MC_REF_MXCSR(pfMxcsr);
4169 IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4170
4171 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4172 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4173 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4174 } IEM_MC_ELSE() {
4175 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4176 } IEM_MC_ENDIF();
4177
4178 IEM_MC_ADVANCE_RIP_AND_FINISH();
4179 IEM_MC_END();
4180 }
4181 else
4182 {
4183 /*
4184 * Register, memory.
4185 */
4186 IEM_MC_BEGIN(3, 3, 0);
4187 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4188 IEM_MC_LOCAL(uint64_t, u64Dst);
4189 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4190 IEM_MC_LOCAL(X86XMMREG, uSrc);
4191 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 2);
4192 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4193
4194 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4195 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4196 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4197 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4198
4199 IEM_MC_PREPARE_FPU_USAGE();
4200 IEM_MC_FPU_TO_MMX_MODE();
4201
4202 IEM_MC_REF_MXCSR(pfMxcsr);
4203
4204 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4205 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4206 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4207 } IEM_MC_ELSE() {
4208 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4209 } IEM_MC_ENDIF();
4210
4211 IEM_MC_ADVANCE_RIP_AND_FINISH();
4212 IEM_MC_END();
4213 }
4214}
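
/*
 * Note: cvttps2pi only needs the low quadword of the XMM source (two packed
 * singles), which is why it fetches a plain uint64_t, whereas cvttpd2pi
 * consumes the full 128 bits (two packed doubles) and therefore takes a
 * PCX86XMMREG, with its memory form using the aligned XMM fetch.  Both write
 * a single 64-bit MMX result and switch the FPU to MMX mode.
 */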
4215
4216
4217/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
4218FNIEMOP_DEF(iemOp_cvttss2si_Gy_Wss)
4219{
4220 IEMOP_MNEMONIC2(RM, CVTTSS2SI, cvttss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4221
4222 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4223 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4224 {
4225 if (IEM_IS_MODRM_REG_MODE(bRm))
4226 {
4227 /* greg64, XMM */
4228 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT);
4229 IEM_MC_LOCAL(uint32_t, fMxcsr);
4230 IEM_MC_LOCAL(int64_t, i64Dst);
4231 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4232 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4233 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4234
4235 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4236 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4237 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4238
4239 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4240 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4241 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4242 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4243 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4244 } IEM_MC_ELSE() {
4245 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4246 } IEM_MC_ENDIF();
4247
4248 IEM_MC_ADVANCE_RIP_AND_FINISH();
4249 IEM_MC_END();
4250 }
4251 else
4252 {
4253 /* greg64, [mem64] */
4254 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT);
4255 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4256 IEM_MC_LOCAL(uint32_t, fMxcsr);
4257 IEM_MC_LOCAL(int64_t, i64Dst);
4258 IEM_MC_LOCAL(uint32_t, u32Src);
4259 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4260 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4261 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4262
4263 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4264 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4265 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4266 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4267
4268 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4269 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4270 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4271 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4272 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4273 } IEM_MC_ELSE() {
4274 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4275 } IEM_MC_ENDIF();
4276
4277 IEM_MC_ADVANCE_RIP_AND_FINISH();
4278 IEM_MC_END();
4279 }
4280 }
4281 else
4282 {
4283 if (IEM_IS_MODRM_REG_MODE(bRm))
4284 {
4285 /* greg32, XMM */
4286 IEM_MC_BEGIN(3, 2, 0);
4287 IEM_MC_LOCAL(uint32_t, fMxcsr);
4288 IEM_MC_LOCAL(int32_t, i32Dst);
4289 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4290 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4291 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4292
4293 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4294 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4295 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4296
4297 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4298 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4299 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4300 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4301 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4302 } IEM_MC_ELSE() {
4303 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4304 } IEM_MC_ENDIF();
4305
4306 IEM_MC_ADVANCE_RIP_AND_FINISH();
4307 IEM_MC_END();
4308 }
4309 else
4310 {
4311 /* greg32, [mem32] */
4312 IEM_MC_BEGIN(3, 4, 0);
4313 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4314 IEM_MC_LOCAL(uint32_t, fMxcsr);
4315 IEM_MC_LOCAL(int32_t, i32Dst);
4316 IEM_MC_LOCAL(uint32_t, u32Src);
4317 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4318 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4319 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4320
4321 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4322 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4323 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4324 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4325
4326 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4327 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4328 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4329 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4330 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4331 } IEM_MC_ELSE() {
4332 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4333 } IEM_MC_ENDIF();
4334
4335 IEM_MC_ADVANCE_RIP_AND_FINISH();
4336 IEM_MC_END();
4337 }
4338 }
4339}
4340
4341
4342/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
4343FNIEMOP_DEF(iemOp_cvttsd2si_Gy_Wsd)
4344{
4345 IEMOP_MNEMONIC2(RM, CVTTSD2SI, cvttsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4346
4347 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4348 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4349 {
4350 if (IEM_IS_MODRM_REG_MODE(bRm))
4351 {
4352 /* greg64, XMM */
4353 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT);
4354 IEM_MC_LOCAL(uint32_t, fMxcsr);
4355 IEM_MC_LOCAL(int64_t, i64Dst);
4356 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4357 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4358 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4359
4360 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4361 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4362 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4363
4364 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4365 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4366 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4367 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4368 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4369 } IEM_MC_ELSE() {
4370 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4371 } IEM_MC_ENDIF();
4372
4373 IEM_MC_ADVANCE_RIP_AND_FINISH();
4374 IEM_MC_END();
4375 }
4376 else
4377 {
4378 /* greg64, [mem64] */
4379 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT);
4380 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4381 IEM_MC_LOCAL(uint32_t, fMxcsr);
4382 IEM_MC_LOCAL(int64_t, i64Dst);
4383 IEM_MC_LOCAL(uint64_t, u64Src);
4384 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4385 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4386 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4387
4388 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4389 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4390 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4391 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4392
4393 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4394 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4395 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4396 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4397 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4398 } IEM_MC_ELSE() {
4399 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4400 } IEM_MC_ENDIF();
4401
4402 IEM_MC_ADVANCE_RIP_AND_FINISH();
4403 IEM_MC_END();
4404 }
4405 }
4406 else
4407 {
4408 if (IEM_IS_MODRM_REG_MODE(bRm))
4409 {
4410 /* greg32, XMM */
4411 IEM_MC_BEGIN(3, 2, 0);
4412 IEM_MC_LOCAL(uint32_t, fMxcsr);
4413 IEM_MC_LOCAL(int32_t, i32Dst);
4414 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4415 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4416 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4417
4418 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4419 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4420 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4421
4422 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4423 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4424 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4425 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4426 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4427 } IEM_MC_ELSE() {
4428 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4429 } IEM_MC_ENDIF();
4430
4431 IEM_MC_ADVANCE_RIP_AND_FINISH();
4432 IEM_MC_END();
4433 }
4434 else
4435 {
4436 /* greg32, [mem64] */
4437 IEM_MC_BEGIN(3, 4, 0);
4438 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4439 IEM_MC_LOCAL(uint32_t, fMxcsr);
4440 IEM_MC_LOCAL(int32_t, i32Dst);
4441 IEM_MC_LOCAL(uint64_t, u64Src);
4442 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4443 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4444 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4445
4446 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4447 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4448 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4449 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4450
4451 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4452 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4453 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4454 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4455 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4456 } IEM_MC_ELSE() {
4457 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4458 } IEM_MC_ENDIF();
4459
4460 IEM_MC_ADVANCE_RIP_AND_FINISH();
4461 IEM_MC_END();
4462 }
4463 }
4464}
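
/*
 * Note: the cvtt* forms (0x2c) truncate toward zero regardless of MXCSR.RC,
 * while the cvt* forms (0x2d) honour the rounding mode; out-of-range inputs
 * yield the integer indefinite (0x80000000 / 0x8000000000000000) when #I is
 * masked.  Illustrative difference via intrinsics:
 * @code
 *    #include <emmintrin.h>
 *    static void TruncVsRound(void)
 *    {
 *        __m128d uVal   = _mm_set_sd(2.7);
 *        int     iTrunc = _mm_cvttsd_si32(uVal); // cvttsd2si: 2 (always truncates)
 *        int     iRound = _mm_cvtsd_si32(uVal);  // cvtsd2si:  3 under default round-to-nearest
 *        (void)iTrunc; (void)iRound;
 *    }
 * @endcode
 */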
4465
4466
4467/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
4468FNIEMOP_DEF(iemOp_cvtps2pi_Ppi_Wps)
4469{
4470 IEMOP_MNEMONIC2(RM, CVTPS2PI, cvtps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4471 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4472 if (IEM_IS_MODRM_REG_MODE(bRm))
4473 {
4474 /*
4475 * Register, register.
4476 */
4477 IEM_MC_BEGIN(3, 1, 0);
4478 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4479 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4480 IEM_MC_LOCAL(uint64_t, u64Dst);
4481 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4482 IEM_MC_ARG(uint64_t, u64Src, 2);
4483
4484 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4485 IEM_MC_PREPARE_FPU_USAGE();
4486 IEM_MC_FPU_TO_MMX_MODE();
4487
4488 IEM_MC_REF_MXCSR(pfMxcsr);
4489 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
4490
4491 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4492 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4493 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4494 } IEM_MC_ELSE() {
4495 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4496 } IEM_MC_ENDIF();
4497
4498 IEM_MC_ADVANCE_RIP_AND_FINISH();
4499 IEM_MC_END();
4500 }
4501 else
4502 {
4503 /*
4504 * Register, memory.
4505 */
4506 IEM_MC_BEGIN(3, 2, 0);
4507 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4508 IEM_MC_LOCAL(uint64_t, u64Dst);
4509 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4510 IEM_MC_ARG(uint64_t, u64Src, 2);
4511 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4512
4513 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4514 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4515 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4516 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4517
4518 IEM_MC_PREPARE_FPU_USAGE();
4519 IEM_MC_FPU_TO_MMX_MODE();
4520 IEM_MC_REF_MXCSR(pfMxcsr);
4521
4522 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4523 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4524 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4525 } IEM_MC_ELSE() {
4526 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4527 } IEM_MC_ENDIF();
4528
4529 IEM_MC_ADVANCE_RIP_AND_FINISH();
4530 IEM_MC_END();
4531 }
4532}
4533
4534
4535 /** Opcode 0x66 0x0f 0x2d - cvtpd2pi Ppi, Wpd */
4536FNIEMOP_DEF(iemOp_cvtpd2pi_Qpi_Wpd)
4537{
4538 IEMOP_MNEMONIC2(RM, CVTPD2PI, cvtpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4539 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4540 if (IEM_IS_MODRM_REG_MODE(bRm))
4541 {
4542 /*
4543 * Register, register.
4544 */
4545 IEM_MC_BEGIN(3, 1, 0);
4546 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4547 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4548 IEM_MC_LOCAL(uint64_t, u64Dst);
4549 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4550 IEM_MC_ARG(PCX86XMMREG, pSrc, 2);
4551
4552 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4553 IEM_MC_PREPARE_FPU_USAGE();
4554 IEM_MC_FPU_TO_MMX_MODE();
4555
4556 IEM_MC_REF_MXCSR(pfMxcsr);
4557 IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4558
4559 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4560 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4561 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4562 } IEM_MC_ELSE() {
4563 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4564 } IEM_MC_ENDIF();
4565
4566 IEM_MC_ADVANCE_RIP_AND_FINISH();
4567 IEM_MC_END();
4568 }
4569 else
4570 {
4571 /*
4572 * Register, memory.
4573 */
4574 IEM_MC_BEGIN(3, 3, 0);
4575 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4576 IEM_MC_LOCAL(uint64_t, u64Dst);
4577 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4578 IEM_MC_LOCAL(X86XMMREG, uSrc);
4579 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 2);
4580 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4581
4582 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4583 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4584 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4585 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4586
4587 IEM_MC_PREPARE_FPU_USAGE();
4588 IEM_MC_FPU_TO_MMX_MODE();
4589
4590 IEM_MC_REF_MXCSR(pfMxcsr);
4591
4592 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4593 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4594 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4595 } IEM_MC_ELSE() {
4596 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4597 } IEM_MC_ENDIF();
4598
4599 IEM_MC_ADVANCE_RIP_AND_FINISH();
4600 IEM_MC_END();
4601 }
4602}
4603
4604
4605/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
4606FNIEMOP_DEF(iemOp_cvtss2si_Gy_Wss)
4607{
4608 IEMOP_MNEMONIC2(RM, CVTSS2SI, cvtss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4609
4610 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4611 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4612 {
4613 if (IEM_IS_MODRM_REG_MODE(bRm))
4614 {
4615 /* greg64, XMM */
4616 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT);
4617 IEM_MC_LOCAL(uint32_t, fMxcsr);
4618 IEM_MC_LOCAL(int64_t, i64Dst);
4619 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4620 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4621 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4622
4623 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4624 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4625 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4626
4627 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4628 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4629 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4630 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4631 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4632 } IEM_MC_ELSE() {
4633 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4634 } IEM_MC_ENDIF();
4635
4636 IEM_MC_ADVANCE_RIP_AND_FINISH();
4637 IEM_MC_END();
4638 }
4639 else
4640 {
4641 /* greg64, [mem64] */
4642 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT);
4643 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4644 IEM_MC_LOCAL(uint32_t, fMxcsr);
4645 IEM_MC_LOCAL(int64_t, i64Dst);
4646 IEM_MC_LOCAL(uint32_t, u32Src);
4647 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4648 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4649 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4650
4651 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4652 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4653 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4654 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4655
4656 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4657 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4658 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4659 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4660 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4661 } IEM_MC_ELSE() {
4662 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4663 } IEM_MC_ENDIF();
4664
4665 IEM_MC_ADVANCE_RIP_AND_FINISH();
4666 IEM_MC_END();
4667 }
4668 }
4669 else
4670 {
4671 if (IEM_IS_MODRM_REG_MODE(bRm))
4672 {
4673 /* greg32, XMM */
4674 IEM_MC_BEGIN(3, 2, 0);
4675 IEM_MC_LOCAL(uint32_t, fMxcsr);
4676 IEM_MC_LOCAL(int32_t, i32Dst);
4677 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4678 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4679 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4680
4681 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4682 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4683 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4684
4685 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4686 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4687 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4688 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4689 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4690 } IEM_MC_ELSE() {
4691 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4692 } IEM_MC_ENDIF();
4693
4694 IEM_MC_ADVANCE_RIP_AND_FINISH();
4695 IEM_MC_END();
4696 }
4697 else
4698 {
4699 /* greg32, [mem32] */
4700 IEM_MC_BEGIN(3, 4, 0);
4701 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4702 IEM_MC_LOCAL(uint32_t, fMxcsr);
4703 IEM_MC_LOCAL(int32_t, i32Dst);
4704 IEM_MC_LOCAL(uint32_t, u32Src);
4705 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4706 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4707 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4708
4709 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4710 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4711 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4712 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4713
4714 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4715 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4716 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4717 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4718 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4719 } IEM_MC_ELSE() {
4720 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4721 } IEM_MC_ENDIF();
4722
4723 IEM_MC_ADVANCE_RIP_AND_FINISH();
4724 IEM_MC_END();
4725 }
4726 }
4727}
4728
4729
4730/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
4731FNIEMOP_DEF(iemOp_cvtsd2si_Gy_Wsd)
4732{
4733 IEMOP_MNEMONIC2(RM, CVTSD2SI, cvtsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4734
4735 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4736 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4737 {
4738 if (IEM_IS_MODRM_REG_MODE(bRm))
4739 {
4740 /* greg64, XMM */
4741 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT);
4742 IEM_MC_LOCAL(uint32_t, fMxcsr);
4743 IEM_MC_LOCAL(int64_t, i64Dst);
4744 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4745 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4746 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4747
4748 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4749 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4750 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4751
4752 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4753 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4754 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4755 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4756 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4757 } IEM_MC_ELSE() {
4758 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4759 } IEM_MC_ENDIF();
4760
4761 IEM_MC_ADVANCE_RIP_AND_FINISH();
4762 IEM_MC_END();
4763 }
4764 else
4765 {
4766 /* greg64, [mem64] */
4767 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT);
4768 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4769 IEM_MC_LOCAL(uint32_t, fMxcsr);
4770 IEM_MC_LOCAL(int64_t, i64Dst);
4771 IEM_MC_LOCAL(uint64_t, u64Src);
4772 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4773 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4774 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4775
4776 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4777 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4778 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4779 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4780
4781 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4782 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4783 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4784 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4785 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4786 } IEM_MC_ELSE() {
4787 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4788 } IEM_MC_ENDIF();
4789
4790 IEM_MC_ADVANCE_RIP_AND_FINISH();
4791 IEM_MC_END();
4792 }
4793 }
4794 else
4795 {
4796 if (IEM_IS_MODRM_REG_MODE(bRm))
4797 {
4798 /* greg32, XMM */
4799 IEM_MC_BEGIN(3, 2, 0);
4800 IEM_MC_LOCAL(uint32_t, fMxcsr);
4801 IEM_MC_LOCAL(int32_t, i32Dst);
4802 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4803 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4804 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4805
4806 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4807 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4808 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4809
4810 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4811 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4812 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4813 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4814 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4815 } IEM_MC_ELSE() {
4816 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4817 } IEM_MC_ENDIF();
4818
4819 IEM_MC_ADVANCE_RIP_AND_FINISH();
4820 IEM_MC_END();
4821 }
4822 else
4823 {
4824 /* greg32, [mem64] */
4825 IEM_MC_BEGIN(3, 4, 0);
4826 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4827 IEM_MC_LOCAL(uint32_t, fMxcsr);
4828 IEM_MC_LOCAL(int32_t, i32Dst);
4829 IEM_MC_LOCAL(uint64_t, u64Src);
4830 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4831 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4832 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4833
4834 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4835 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4836 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4837 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4838
4839 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4840 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4841 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4842 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4843 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4844 } IEM_MC_ELSE() {
4845 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4846 } IEM_MC_ENDIF();
4847
4848 IEM_MC_ADVANCE_RIP_AND_FINISH();
4849 IEM_MC_END();
4850 }
4851 }
4852}
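
/*
 * Note: all the conversion workers above follow the same MXCSR pattern: run
 * the AIMPL worker against a private MXCSR copy, merge the status flags back
 * (IEM_MC_SSE_UPDATE_MXCSR), and only commit the destination when no unmasked
 * exception became pending.  A sketch of the pending-exception predicate,
 * assuming the usual MXCSR layout (status flags in bits 0..5, mask bits in
 * bits 7..12):
 * @code
 *    bool const fXcptPending = (  (fMxcsr & X86_MXCSR_XCPT_FLAGS)
 *                               & ((~fMxcsr & X86_MXCSR_XCPT_MASK) >> 7)) != 0;
 * @endcode
 * i.e. some status bit is set whose corresponding mask bit is clear.
 */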
4853
4854
4855/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
4856FNIEMOP_DEF(iemOp_ucomiss_Vss_Wss)
4857{
4858 IEMOP_MNEMONIC2(RM, UCOMISS, ucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4859 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4860 if (IEM_IS_MODRM_REG_MODE(bRm))
4861 {
4862 /*
4863 * Register, register.
4864 */
4865 IEM_MC_BEGIN(4, 1, 0);
4866 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4867 IEM_MC_LOCAL(uint32_t, fEFlags);
4868 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4869 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4870 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4871 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
4872 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4873 IEM_MC_PREPARE_SSE_USAGE();
4874 IEM_MC_FETCH_EFLAGS(fEFlags);
4875 IEM_MC_REF_MXCSR(pfMxcsr);
4876 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4877 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
4878 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4879 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4880 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4881 } IEM_MC_ELSE() {
4882 IEM_MC_COMMIT_EFLAGS(fEFlags);
4883 } IEM_MC_ENDIF();
4884
4885 IEM_MC_ADVANCE_RIP_AND_FINISH();
4886 IEM_MC_END();
4887 }
4888 else
4889 {
4890 /*
4891 * Register, memory.
4892 */
4893 IEM_MC_BEGIN(4, 3, 0);
4894 IEM_MC_LOCAL(uint32_t, fEFlags);
4895 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4896 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4897 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4898 IEM_MC_LOCAL(X86XMMREG, uSrc2);
4899 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
4900 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4901
4902 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4903 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4904 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4905 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4906
4907 IEM_MC_PREPARE_SSE_USAGE();
4908 IEM_MC_FETCH_EFLAGS(fEFlags);
4909 IEM_MC_REF_MXCSR(pfMxcsr);
4910 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4911 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4912 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4913 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4914 } IEM_MC_ELSE() {
4915 IEM_MC_COMMIT_EFLAGS(fEFlags);
4916 } IEM_MC_ENDIF();
4917
4918 IEM_MC_ADVANCE_RIP_AND_FINISH();
4919 IEM_MC_END();
4920 }
4921}
4922
4923
4924/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
4925FNIEMOP_DEF(iemOp_ucomisd_Vsd_Wsd)
4926{
4927 IEMOP_MNEMONIC2(RM, UCOMISD, ucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4928 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4929 if (IEM_IS_MODRM_REG_MODE(bRm))
4930 {
4931 /*
4932 * Register, register.
4933 */
4934 IEM_MC_BEGIN(4, 1, 0);
4935 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4936 IEM_MC_LOCAL(uint32_t, fEFlags);
4937 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4938 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4939 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4940 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
4941 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4942 IEM_MC_PREPARE_SSE_USAGE();
4943 IEM_MC_FETCH_EFLAGS(fEFlags);
4944 IEM_MC_REF_MXCSR(pfMxcsr);
4945 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4946 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
4947 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4948 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4949 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4950 } IEM_MC_ELSE() {
4951 IEM_MC_COMMIT_EFLAGS(fEFlags);
4952 } IEM_MC_ENDIF();
4953
4954 IEM_MC_ADVANCE_RIP_AND_FINISH();
4955 IEM_MC_END();
4956 }
4957 else
4958 {
4959 /*
4960 * Register, memory.
4961 */
4962 IEM_MC_BEGIN(4, 3, 0);
4963 IEM_MC_LOCAL(uint32_t, fEFlags);
4964 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4965 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4966 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4967 IEM_MC_LOCAL(X86XMMREG, uSrc2);
4968 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
4969 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4970
4971 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4972 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4973 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4974 IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4975
4976 IEM_MC_PREPARE_SSE_USAGE();
4977 IEM_MC_FETCH_EFLAGS(fEFlags);
4978 IEM_MC_REF_MXCSR(pfMxcsr);
4979 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4980 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4981 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4982 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4983 } IEM_MC_ELSE() {
4984 IEM_MC_COMMIT_EFLAGS(fEFlags);
4985 } IEM_MC_ENDIF();
4986
4987 IEM_MC_ADVANCE_RIP_AND_FINISH();
4988 IEM_MC_END();
4989 }
4990}
4991
4992
4993/* Opcode 0xf3 0x0f 0x2e - invalid */
4994/* Opcode 0xf2 0x0f 0x2e - invalid */
4995
4996
4997/** Opcode 0x0f 0x2f - comiss Vss, Wss */
4998FNIEMOP_DEF(iemOp_comiss_Vss_Wss)
4999{
5000 IEMOP_MNEMONIC2(RM, COMISS, comiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
5001 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5002 if (IEM_IS_MODRM_REG_MODE(bRm))
5003 {
5004 /*
5005 * Register, register.
5006 */
5007 IEM_MC_BEGIN(4, 1, 0);
5008 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
5009 IEM_MC_LOCAL(uint32_t, fEFlags);
5010 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5011 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5012 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5013 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
5014 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5015 IEM_MC_PREPARE_SSE_USAGE();
5016 IEM_MC_FETCH_EFLAGS(fEFlags);
5017 IEM_MC_REF_MXCSR(pfMxcsr);
5018 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5019 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
5020 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5021 IEM_MC_IF_MXCSR_XCPT_PENDING() {
5022 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5023 } IEM_MC_ELSE() {
5024 IEM_MC_COMMIT_EFLAGS(fEFlags);
5025 } IEM_MC_ENDIF();
5026
5027 IEM_MC_ADVANCE_RIP_AND_FINISH();
5028 IEM_MC_END();
5029 }
5030 else
5031 {
5032 /*
5033 * Register, memory.
5034 */
5035 IEM_MC_BEGIN(4, 3, 0);
5036 IEM_MC_LOCAL(uint32_t, fEFlags);
5037 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5038 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5039 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5040 IEM_MC_LOCAL(X86XMMREG, uSrc2);
5041 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
5042 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5043
5044 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5045 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
5046 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5047 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5048
5049 IEM_MC_PREPARE_SSE_USAGE();
5050 IEM_MC_FETCH_EFLAGS(fEFlags);
5051 IEM_MC_REF_MXCSR(pfMxcsr);
5052 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5053 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5054 IEM_MC_IF_MXCSR_XCPT_PENDING() {
5055 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5056 } IEM_MC_ELSE() {
5057 IEM_MC_COMMIT_EFLAGS(fEFlags);
5058 } IEM_MC_ENDIF();
5059
5060 IEM_MC_ADVANCE_RIP_AND_FINISH();
5061 IEM_MC_END();
5062 }
5063}
5064
5065
5066/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
5067FNIEMOP_DEF(iemOp_comisd_Vsd_Wsd)
5068{
5069 IEMOP_MNEMONIC2(RM, COMISD, comisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
5070 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5071 if (IEM_IS_MODRM_REG_MODE(bRm))
5072 {
5073 /*
5074 * Register, register.
5075 */
5076 IEM_MC_BEGIN(4, 1, 0);
5077 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5078 IEM_MC_LOCAL(uint32_t, fEFlags);
5079 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5080 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5081 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5082 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
5083 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5084 IEM_MC_PREPARE_SSE_USAGE();
5085 IEM_MC_FETCH_EFLAGS(fEFlags);
5086 IEM_MC_REF_MXCSR(pfMxcsr);
5087 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5088 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
5089 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5090 IEM_MC_IF_MXCSR_XCPT_PENDING() {
5091 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5092 } IEM_MC_ELSE() {
5093 IEM_MC_COMMIT_EFLAGS(fEFlags);
5094 } IEM_MC_ENDIF();
5095
5096 IEM_MC_ADVANCE_RIP_AND_FINISH();
5097 IEM_MC_END();
5098 }
5099 else
5100 {
5101 /*
5102 * Register, memory.
5103 */
5104 IEM_MC_BEGIN(4, 3, 0);
5105 IEM_MC_LOCAL(uint32_t, fEFlags);
5106 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5107 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5108 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5109 IEM_MC_LOCAL(X86XMMREG, uSrc2);
5110 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
5111 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5112
5113 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5114 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5115 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5116 IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5117
5118 IEM_MC_PREPARE_SSE_USAGE();
5119 IEM_MC_FETCH_EFLAGS(fEFlags);
5120 IEM_MC_REF_MXCSR(pfMxcsr);
5121 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5122 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5123 IEM_MC_IF_MXCSR_XCPT_PENDING() {
5124 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5125 } IEM_MC_ELSE() {
5126 IEM_MC_COMMIT_EFLAGS(fEFlags);
5127 } IEM_MC_ENDIF();
5128
5129 IEM_MC_ADVANCE_RIP_AND_FINISH();
5130 IEM_MC_END();
5131 }
5132}
5133
5134
5135/* Opcode 0xf3 0x0f 0x2f - invalid */
5136/* Opcode 0xf2 0x0f 0x2f - invalid */
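
/*
 * Note: all four compare instructions above (0x2e/0x2f) set EFLAGS the same
 * way (per the SDM):
 *
 *    unordered:    ZF=1, PF=1, CF=1
 *    greater-than: ZF=0, PF=0, CF=0
 *    less-than:    ZF=0, PF=0, CF=1
 *    equal:        ZF=1, PF=0, CF=0
 *
 * with OF, SF and AF cleared.  The only difference between ucomis* (0x2e)
 * and comis* (0x2f) is that the former signals #I for SNaN inputs only,
 * while the latter signals it for QNaNs as well.
 */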
5137
5138/** Opcode 0x0f 0x30. */
5139FNIEMOP_DEF(iemOp_wrmsr)
5140{
5141 IEMOP_MNEMONIC(wrmsr, "wrmsr");
5142 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5143 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_wrmsr);
5144}
5145
5146
5147/** Opcode 0x0f 0x31. */
5148FNIEMOP_DEF(iemOp_rdtsc)
5149{
5150 IEMOP_MNEMONIC(rdtsc, "rdtsc");
5151 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5152 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_rdtsc);
5153}
5154
5155
5156 /** Opcode 0x0f 0x32. */
5157FNIEMOP_DEF(iemOp_rdmsr)
5158{
5159 IEMOP_MNEMONIC(rdmsr, "rdmsr");
5160 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5161 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_rdmsr);
5162}
5163
5164
5165 /** Opcode 0x0f 0x33. */
5166FNIEMOP_DEF(iemOp_rdpmc)
5167{
5168 IEMOP_MNEMONIC(rdpmc, "rdpmc");
5169 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5170 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_rdpmc);
5171}
5172
5173
5174/** Opcode 0x0f 0x34. */
5175FNIEMOP_DEF(iemOp_sysenter)
5176{
5177 IEMOP_MNEMONIC0(FIXED, SYSENTER, sysenter, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
5178 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5179 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
5180 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
5181 iemCImpl_sysenter);
5182}
5183
5184/** Opcode 0x0f 0x35. */
5185FNIEMOP_DEF(iemOp_sysexit)
5186{
5187 IEMOP_MNEMONIC0(FIXED, SYSEXIT, sysexit, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
5188 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5189 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
5190 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
5191 iemCImpl_sysexit, pVCpu->iem.s.enmEffOpSize);
5192}
5193
5194/** Opcode 0x0f 0x37. */
5195FNIEMOP_STUB(iemOp_getsec);
5196
5197
5198/** Opcode 0x0f 0x38. */
5199FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
5200{
5201#ifdef IEM_WITH_THREE_0F_38
5202 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
5203 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
5204#else
5205 IEMOP_BITCH_ABOUT_STUB();
5206 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5207#endif
5208}
5209
5210
5211/** Opcode 0x0f 0x3a. */
5212FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
5213{
5214#ifdef IEM_WITH_THREE_0F_3A
5215 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
5216 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
5217#else
5218 IEMOP_BITCH_ABOUT_STUB();
5219 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5220#endif
5221}
5222
5223
5224/**
5225 * Implements a conditional move.
5226 *
5227 * Wish there were an obvious way to do this that would let us share code
5228 * and reduce bloat.
5229 *
5230 * @param a_Cnd The conditional "microcode" operation.
5231 */
5232#define CMOV_X(a_Cnd) \
5233 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
5234 if (IEM_IS_MODRM_REG_MODE(bRm)) \
5235 { \
5236 switch (pVCpu->iem.s.enmEffOpSize) \
5237 { \
5238 case IEMMODE_16BIT: \
5239 IEM_MC_BEGIN(0, 1, 0); \
5240 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5241 IEM_MC_LOCAL(uint16_t, u16Tmp); \
5242 a_Cnd { \
5243 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5244 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
5245 } IEM_MC_ENDIF(); \
5246 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5247 IEM_MC_END(); \
5248 break; \
5249 \
5250 case IEMMODE_32BIT: \
5251 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386); \
5252 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5253 IEM_MC_LOCAL(uint32_t, u32Tmp); \
5254 a_Cnd { \
5255 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5256 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
5257 } IEM_MC_ELSE() { \
5258 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
5259 } IEM_MC_ENDIF(); \
5260 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5261 IEM_MC_END(); \
5262 break; \
5263 \
5264 case IEMMODE_64BIT: \
5265 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT); \
5266 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5267 IEM_MC_LOCAL(uint64_t, u64Tmp); \
5268 a_Cnd { \
5269 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5270 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
5271 } IEM_MC_ENDIF(); \
5272 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5273 IEM_MC_END(); \
5274 break; \
5275 \
5276 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5277 } \
5278 } \
5279 else \
5280 { \
5281 switch (pVCpu->iem.s.enmEffOpSize) \
5282 { \
5283 case IEMMODE_16BIT: \
5284 IEM_MC_BEGIN(0, 2, 0); \
5285 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5286 IEM_MC_LOCAL(uint16_t, u16Tmp); \
5287 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5288 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5289 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5290 a_Cnd { \
5291 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
5292 } IEM_MC_ENDIF(); \
5293 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5294 IEM_MC_END(); \
5295 break; \
5296 \
5297 case IEMMODE_32BIT: \
5298 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386); \
5299 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5300 IEM_MC_LOCAL(uint32_t, u32Tmp); \
5301 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5302 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5303 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5304 a_Cnd { \
5305 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
5306 } IEM_MC_ELSE() { \
5307 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
5308 } IEM_MC_ENDIF(); \
5309 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5310 IEM_MC_END(); \
5311 break; \
5312 \
5313 case IEMMODE_64BIT: \
5314 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT); \
5315 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5316 IEM_MC_LOCAL(uint64_t, u64Tmp); \
5317 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5318 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5319 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5320 a_Cnd { \
5321 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
5322 } IEM_MC_ENDIF(); \
5323 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5324 IEM_MC_END(); \
5325 break; \
5326 \
5327 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5328 } \
5329 } do {} while (0)
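
/*
 * Illustrative expansion (sketch only): for the register form of cmove the
 * macro boils down to roughly the following; note that in 64-bit mode the
 * 32-bit variant clears the high dword of the destination even when the
 * condition is false:
 *
 *      if (fEFlags & X86_EFL_ZF)
 *          uDst.u32 = uSrc.u32;    // the 32-bit store zero-extends
 *      else
 *          uDst.au32[1] = 0;       // IEM_MC_CLEAR_HIGH_GREG_U64
 */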
5330
5331
5332
5333/** Opcode 0x0f 0x40. */
5334FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
5335{
5336 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
5337 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
5338}
5339
5340
5341/** Opcode 0x0f 0x41. */
5342FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
5343{
5344 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
5345 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
5346}
5347
5348
5349/** Opcode 0x0f 0x42. */
5350FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
5351{
5352 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
5353 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
5354}
5355
5356
5357/** Opcode 0x0f 0x43. */
5358FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
5359{
5360 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
5361 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
5362}
5363
5364
5365/** Opcode 0x0f 0x44. */
5366FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
5367{
5368 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
5369 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
5370}
5371
5372
5373/** Opcode 0x0f 0x45. */
5374FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
5375{
5376 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
5377 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
5378}
5379
5380
5381/** Opcode 0x0f 0x46. */
5382FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
5383{
5384 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
5385 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
5386}
5387
5388
5389/** Opcode 0x0f 0x47. */
5390FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
5391{
5392 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
5393 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
5394}
5395
5396
5397/** Opcode 0x0f 0x48. */
5398FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
5399{
5400 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
5401 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
5402}
5403
5404
5405/** Opcode 0x0f 0x49. */
5406FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
5407{
5408 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
5409 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
5410}
5411
5412
5413/** Opcode 0x0f 0x4a. */
5414FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
5415{
5416 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
5417 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
5418}
5419
5420
5421/** Opcode 0x0f 0x4b. */
5422FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
5423{
5424 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
5425 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
5426}
5427
5428
5429/** Opcode 0x0f 0x4c. */
5430FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
5431{
5432 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
5433 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
5434}
5435
5436
5437/** Opcode 0x0f 0x4d. */
5438FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
5439{
5440 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
5441 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
5442}
5443
5444
5445/** Opcode 0x0f 0x4e. */
5446FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
5447{
5448 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
5449 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5450}
5451
5452
5453/** Opcode 0x0f 0x4f. */
5454FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
5455{
5456 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
5457 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5458}
5459
5460#undef CMOV_X
5461
5462/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
5463FNIEMOP_DEF(iemOp_movmskps_Gy_Ups)
5464{
5465 IEMOP_MNEMONIC2(RM_REG, MOVMSKPS, movmskps, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /** @todo */
5466 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5467 if (IEM_IS_MODRM_REG_MODE(bRm))
5468 {
5469 /*
5470 * Register, register.
5471 */
5472 IEM_MC_BEGIN(2, 1, 0);
5473 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
5474 IEM_MC_LOCAL(uint8_t, u8Dst);
5475 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5476 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5477 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5478 IEM_MC_PREPARE_SSE_USAGE();
5479 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5480 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskps_u128, pu8Dst, puSrc);
5481 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5482 IEM_MC_ADVANCE_RIP_AND_FINISH();
5483 IEM_MC_END();
5484 }
5485 /* No memory operand. */
5486 else
5487 IEMOP_RAISE_INVALID_OPCODE_RET();
5488}
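
/*
 * Reference semantics (illustrative sketch): movmskps gathers the sign bit
 * of each of the four packed singles into the low nibble of the destination
 * GPR:
 *      u8Dst = 0;
 *      for (unsigned i = 0; i < 4; i++)
 *          u8Dst |= (puSrc->au32[i] >> 31) << i;
 */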
5489
5490
5491/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
5492FNIEMOP_DEF(iemOp_movmskpd_Gy_Upd)
5493{
5494 IEMOP_MNEMONIC2(RM_REG, MOVMSKPD, movmskpd, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /** @todo */
5495 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5496 if (IEM_IS_MODRM_REG_MODE(bRm))
5497 {
5498 /*
5499 * Register, register.
5500 */
5501 IEM_MC_BEGIN(2, 1, 0);
5502 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5503 IEM_MC_LOCAL(uint8_t, u8Dst);
5504 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5505 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5506 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5507 IEM_MC_PREPARE_SSE_USAGE();
5508 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5509 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskpd_u128, pu8Dst, puSrc);
5510 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5511 IEM_MC_ADVANCE_RIP_AND_FINISH();
5512 IEM_MC_END();
5513 }
5514 /* No memory operand. */
5515 else
5516 IEMOP_RAISE_INVALID_OPCODE_RET();
5517
5518}
5519
5520
5521/* Opcode 0xf3 0x0f 0x50 - invalid */
5522/* Opcode 0xf2 0x0f 0x50 - invalid */
5523
5524
5525/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
5526FNIEMOP_DEF(iemOp_sqrtps_Vps_Wps)
5527{
5528 IEMOP_MNEMONIC2(RM, SQRTPS, sqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5529 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_sqrtps_u128);
5530}
5531
5532
5533/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
5534FNIEMOP_DEF(iemOp_sqrtpd_Vpd_Wpd)
5535{
5536 IEMOP_MNEMONIC2(RM, SQRTPD, sqrtpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5537 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_sqrtpd_u128);
5538}
5539
5540
5541/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
5542FNIEMOP_DEF(iemOp_sqrtss_Vss_Wss)
5543{
5544 IEMOP_MNEMONIC2(RM, SQRTSS, sqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5545 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_sqrtss_u128_r32);
5546}
5547
5548
5549/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
5550FNIEMOP_DEF(iemOp_sqrtsd_Vsd_Wsd)
5551{
5552 IEMOP_MNEMONIC2(RM, SQRTSD, sqrtsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5553 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_sqrtsd_u128_r64);
5554}
5555
5556
5557/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
5558FNIEMOP_DEF(iemOp_rsqrtps_Vps_Wps)
5559{
5560 IEMOP_MNEMONIC2(RM, RSQRTPS, rsqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5561 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_rsqrtps_u128);
5562}
5563
5564
5565/* Opcode 0x66 0x0f 0x52 - invalid */
5566
5567
5568/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
5569FNIEMOP_DEF(iemOp_rsqrtss_Vss_Wss)
5570{
5571 IEMOP_MNEMONIC2(RM, RSQRTSS, rsqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5572 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_rsqrtss_u128_r32);
5573}
5574
5575
5576/* Opcode 0xf2 0x0f 0x52 - invalid */
5577
5578/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
5579FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
5580/* Opcode 0x66 0x0f 0x53 - invalid */
5581/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
5582FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
5583/* Opcode 0xf2 0x0f 0x53 - invalid */
5584
5585
5586/** Opcode 0x0f 0x54 - andps Vps, Wps */
5587FNIEMOP_DEF(iemOp_andps_Vps_Wps)
5588{
5589 IEMOP_MNEMONIC2(RM, ANDPS, andps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5590 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pand_u128);
5591}
5592
5593
5594/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
5595FNIEMOP_DEF(iemOp_andpd_Vpd_Wpd)
5596{
5597 IEMOP_MNEMONIC2(RM, ANDPD, andpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5598 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
5599}
5600
5601
5602/* Opcode 0xf3 0x0f 0x54 - invalid */
5603/* Opcode 0xf2 0x0f 0x54 - invalid */
5604
5605
5606/** Opcode 0x0f 0x55 - andnps Vps, Wps */
5607FNIEMOP_DEF(iemOp_andnps_Vps_Wps)
5608{
5609 IEMOP_MNEMONIC2(RM, ANDNPS, andnps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5610 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pandn_u128);
5611}
5612
5613
5614/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
5615FNIEMOP_DEF(iemOp_andnpd_Vpd_Wpd)
5616{
5617 IEMOP_MNEMONIC2(RM, ANDNPD, andnpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5618 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
5619}
5620
5621
5622/* Opcode 0xf3 0x0f 0x55 - invalid */
5623/* Opcode 0xf2 0x0f 0x55 - invalid */
5624
5625
5626/** Opcode 0x0f 0x56 - orps Vps, Wps */
5627FNIEMOP_DEF(iemOp_orps_Vps_Wps)
5628{
5629 IEMOP_MNEMONIC2(RM, ORPS, orps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5630 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_por_u128);
5631}
5632
5633
5634/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
5635FNIEMOP_DEF(iemOp_orpd_Vpd_Wpd)
5636{
5637 IEMOP_MNEMONIC2(RM, ORPD, orpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5638 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
5639}
5640
5641
5642/* Opcode 0xf3 0x0f 0x56 - invalid */
5643/* Opcode 0xf2 0x0f 0x56 - invalid */
5644
5645
5646/** Opcode 0x0f 0x57 - xorps Vps, Wps */
5647FNIEMOP_DEF(iemOp_xorps_Vps_Wps)
5648{
5649 IEMOP_MNEMONIC2(RM, XORPS, xorps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5650 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pxor_u128);
5651}
5652
5653
5654/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
5655FNIEMOP_DEF(iemOp_xorpd_Vpd_Wpd)
5656{
5657 IEMOP_MNEMONIC2(RM, XORPD, xorpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5658 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
5659}
5660
5661
5662/* Opcode 0xf3 0x0f 0x57 - invalid */
5663/* Opcode 0xf2 0x0f 0x57 - invalid */
5664
5665/** Opcode 0x0f 0x58 - addps Vps, Wps */
5666FNIEMOP_DEF(iemOp_addps_Vps_Wps)
5667{
5668 IEMOP_MNEMONIC2(RM, ADDPS, addps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5669 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
5670}
5671
5672
5673/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
5674FNIEMOP_DEF(iemOp_addpd_Vpd_Wpd)
5675{
5676 IEMOP_MNEMONIC2(RM, ADDPD, addpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5677 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_addpd_u128);
5678}
5679
5680
5681/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
5682FNIEMOP_DEF(iemOp_addss_Vss_Wss)
5683{
5684 IEMOP_MNEMONIC2(RM, ADDSS, addss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5685 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_addss_u128_r32);
5686}
5687
5688
5689/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
5690FNIEMOP_DEF(iemOp_addsd_Vsd_Wsd)
5691{
5692 IEMOP_MNEMONIC2(RM, ADDSD, addsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5693 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_addsd_u128_r64);
5694}
5695
5696
5697/** Opcode 0x0f 0x59 - mulps Vps, Wps */
5698FNIEMOP_DEF(iemOp_mulps_Vps_Wps)
5699{
5700 IEMOP_MNEMONIC2(RM, MULPS, mulps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5701 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_mulps_u128);
5702}
5703
5704
5705/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
5706FNIEMOP_DEF(iemOp_mulpd_Vpd_Wpd)
5707{
5708 IEMOP_MNEMONIC2(RM, MULPD, mulpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5709 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_mulpd_u128);
5710}
5711
5712
5713/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
5714FNIEMOP_DEF(iemOp_mulss_Vss_Wss)
5715{
5716 IEMOP_MNEMONIC2(RM, MULSS, mulss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5717 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_mulss_u128_r32);
5718}
5719
5720
5721/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
5722FNIEMOP_DEF(iemOp_mulsd_Vsd_Wsd)
5723{
5724 IEMOP_MNEMONIC2(RM, MULSD, mulsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5725 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_mulsd_u128_r64);
5726}
5727
5728
5729/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
5730FNIEMOP_DEF(iemOp_cvtps2pd_Vpd_Wps)
5731{
5732 IEMOP_MNEMONIC2(RM, CVTPS2PD, cvtps2pd, Vpd, Wps, DISOPTYPE_HARMLESS, 0);
5733 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2pd_u128);
5734}
5735
5736
5737/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
5738FNIEMOP_DEF(iemOp_cvtpd2ps_Vps_Wpd)
5739{
5740 IEMOP_MNEMONIC2(RM, CVTPD2PS, cvtpd2ps, Vps, Wpd, DISOPTYPE_HARMLESS, 0);
5741 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2ps_u128);
5742}
5743
5744
5745/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
5746FNIEMOP_DEF(iemOp_cvtss2sd_Vsd_Wss)
5747{
5748 IEMOP_MNEMONIC2(RM, CVTSS2SD, cvtss2sd, Vsd, Wss, DISOPTYPE_HARMLESS, 0);
5749 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_cvtss2sd_u128_r32);
5750}
5751
5752
5753/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
5754FNIEMOP_DEF(iemOp_cvtsd2ss_Vss_Wsd)
5755{
5756 IEMOP_MNEMONIC2(RM, CVTSD2SS, cvtsd2ss, Vss, Wsd, DISOPTYPE_HARMLESS, 0);
5757 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_cvtsd2ss_u128_r64);
5758}
5759
5760
5761/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
5762FNIEMOP_DEF(iemOp_cvtdq2ps_Vps_Wdq)
5763{
5764 IEMOP_MNEMONIC2(RM, CVTDQ2PS, cvtdq2ps, Vps, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5765 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2ps_u128);
5766}
5767
5768
5769/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
5770FNIEMOP_DEF(iemOp_cvtps2dq_Vdq_Wps)
5771{
5772 IEMOP_MNEMONIC2(RM, CVTPS2DQ, cvtps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5773 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2dq_u128);
5774}
5775
5776
5777/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
5778FNIEMOP_DEF(iemOp_cvttps2dq_Vdq_Wps)
5779{
5780 IEMOP_MNEMONIC2(RM, CVTTPS2DQ, cvttps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5781 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttps2dq_u128);
5782}
5783
5784
5785/* Opcode 0xf2 0x0f 0x5b - invalid */
5786
5787
5788/** Opcode 0x0f 0x5c - subps Vps, Wps */
5789FNIEMOP_DEF(iemOp_subps_Vps_Wps)
5790{
5791 IEMOP_MNEMONIC2(RM, SUBPS, subps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5792 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_subps_u128);
5793}
5794
5795
5796/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
5797FNIEMOP_DEF(iemOp_subpd_Vpd_Wpd)
5798{
5799 IEMOP_MNEMONIC2(RM, SUBPD, subpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5800 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_subpd_u128);
5801}
5802
5803
5804/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
5805FNIEMOP_DEF(iemOp_subss_Vss_Wss)
5806{
5807 IEMOP_MNEMONIC2(RM, SUBSS, subss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5808 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_subss_u128_r32);
5809}
5810
5811
5812/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
5813FNIEMOP_DEF(iemOp_subsd_Vsd_Wsd)
5814{
5815 IEMOP_MNEMONIC2(RM, SUBSD, subsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5816 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_subsd_u128_r64);
5817}
5818
5819
5820/** Opcode 0x0f 0x5d - minps Vps, Wps */
5821FNIEMOP_DEF(iemOp_minps_Vps_Wps)
5822{
5823 IEMOP_MNEMONIC2(RM, MINPS, minps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5824 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_minps_u128);
5825}
5826
5827
5828/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
5829FNIEMOP_DEF(iemOp_minpd_Vpd_Wpd)
5830{
5831 IEMOP_MNEMONIC2(RM, MINPD, minpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5832 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_minpd_u128);
5833}
5834
5835
5836/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
5837FNIEMOP_DEF(iemOp_minss_Vss_Wss)
5838{
5839 IEMOP_MNEMONIC2(RM, MINSS, minss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5840 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_minss_u128_r32);
5841}
5842
5843
5844/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
5845FNIEMOP_DEF(iemOp_minsd_Vsd_Wsd)
5846{
5847 IEMOP_MNEMONIC2(RM, MINSD, minsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5848 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_minsd_u128_r64);
5849}
5850
5851
5852/** Opcode 0x0f 0x5e - divps Vps, Wps */
5853FNIEMOP_DEF(iemOp_divps_Vps_Wps)
5854{
5855 IEMOP_MNEMONIC2(RM, DIVPS, divps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5856 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_divps_u128);
5857}
5858
5859
5860/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
5861FNIEMOP_DEF(iemOp_divpd_Vpd_Wpd)
5862{
5863 IEMOP_MNEMONIC2(RM, DIVPD, divpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5864 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_divpd_u128);
5865}
5866
5867
5868/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
5869FNIEMOP_DEF(iemOp_divss_Vss_Wss)
5870{
5871 IEMOP_MNEMONIC2(RM, DIVSS, divss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5872 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_divss_u128_r32);
5873}
5874
5875
5876/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
5877FNIEMOP_DEF(iemOp_divsd_Vsd_Wsd)
5878{
5879 IEMOP_MNEMONIC2(RM, DIVSD, divsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5880 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_divsd_u128_r64);
5881}
5882
5883
5884/** Opcode 0x0f 0x5f - maxps Vps, Wps */
5885FNIEMOP_DEF(iemOp_maxps_Vps_Wps)
5886{
5887 IEMOP_MNEMONIC2(RM, MAXPS, maxps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5888 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_maxps_u128);
5889}
5890
5891
5892/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
5893FNIEMOP_DEF(iemOp_maxpd_Vpd_Wpd)
5894{
5895 IEMOP_MNEMONIC2(RM, MAXPD, maxpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5896 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_maxpd_u128);
5897}
5898
5899
5900/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
5901FNIEMOP_DEF(iemOp_maxss_Vss_Wss)
5902{
5903 IEMOP_MNEMONIC2(RM, MAXSS, maxss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5904 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_maxss_u128_r32);
5905}
5906
5907
5908/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
5909FNIEMOP_DEF(iemOp_maxsd_Vsd_Wsd)
5910{
5911 IEMOP_MNEMONIC2(RM, MAXSD, maxsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5912 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_maxsd_u128_r64);
5913}
5914
5915
5916/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
5917FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
5918{
5919 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5920 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
5921}
5922
5923
5924/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
5925FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
5926{
5927 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5928 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklbw_u128);
5929}
5930
5931
5932/* Opcode 0xf3 0x0f 0x60 - invalid */
5933
5934
5935/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
5936FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
5937{
5938 /** @todo AMD marks the MMX version as 3DNow!; Intel says MMX CPUID is required. */
5939 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5940 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklwd_u64);
5941}
5942
5943
5944/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
5945FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
5946{
5947 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5948 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklwd_u128);
5949}
5950
5951
5952/* Opcode 0xf3 0x0f 0x61 - invalid */
5953
5954
5955/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
5956FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
5957{
5958 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5959 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpckldq_u64);
5960}
5961
5962
5963/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
5964FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
5965{
5966 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5967 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpckldq_u128);
5968}
5969
5970
5971/* Opcode 0xf3 0x0f 0x62 - invalid */
5972
5973
5974
5975/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
5976FNIEMOP_DEF(iemOp_packsswb_Pq_Qq)
5977{
5978 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5979 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packsswb_u64);
5980}
5981
5982
5983/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
5984FNIEMOP_DEF(iemOp_packsswb_Vx_Wx)
5985{
5986 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5987 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packsswb_u128);
5988}
5989
5990
5991/* Opcode 0xf3 0x0f 0x63 - invalid */
5992
5993
5994/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
5995FNIEMOP_DEF(iemOp_pcmpgtb_Pq_Qq)
5996{
5997 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5998 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtb_u64);
5999}
6000
6001
6002/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
6003FNIEMOP_DEF(iemOp_pcmpgtb_Vx_Wx)
6004{
6005 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6006 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtb_u128);
6007}
6008
6009
6010/* Opcode 0xf3 0x0f 0x64 - invalid */
6011
6012
6013/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
6014FNIEMOP_DEF(iemOp_pcmpgtw_Pq_Qq)
6015{
6016 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6017 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtw_u64);
6018}
6019
6020
6021/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
6022FNIEMOP_DEF(iemOp_pcmpgtw_Vx_Wx)
6023{
6024 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6025 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtw_u128);
6026}
6027
6028
6029/* Opcode 0xf3 0x0f 0x65 - invalid */
6030
6031
6032/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
6033FNIEMOP_DEF(iemOp_pcmpgtd_Pq_Qq)
6034{
6035 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6036 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtd_u64);
6037}
6038
6039
6040/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
6041FNIEMOP_DEF(iemOp_pcmpgtd_Vx_Wx)
6042{
6043 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6044 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtd_u128);
6045}
6046
6047
6048/* Opcode 0xf3 0x0f 0x66 - invalid */
6049
6050
6051/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
6052FNIEMOP_DEF(iemOp_packuswb_Pq_Qq)
6053{
6054 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6055 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packuswb_u64);
6056}
6057
6058
6059/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
6060FNIEMOP_DEF(iemOp_packuswb_Vx_Wx)
6061{
6062 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6063 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packuswb_u128);
6064}
6065
6066
6067/* Opcode 0xf3 0x0f 0x67 - invalid */
6068
6069
6070/** Opcode 0x0f 0x68 - punpckhbw Pq, Qq
6071 * @note Intel and AMD both use Qd for the second parameter; however, they
6072 * both list it as an mmX/mem64 operand and Intel describes it as being
6073 * loaded as a qword, so it should be Qq, shouldn't it? */
6074FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq)
6075{
6076 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6077 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
6078}
6079
6080
6081/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
6082FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
6083{
6084 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6085 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhbw_u128);
6086}
6087
6088
6089/* Opcode 0xf3 0x0f 0x68 - invalid */
6090
6091
6092/** Opcode 0x0f 0x69 - punpckhwd Pq, Qq
6093 * @note Intel and AMD both use Qd for the second parameter; however, they
6094 * both list it as an mmX/mem64 operand and Intel describes it as being
6095 * loaded as a qword, so it should be Qq, shouldn't it? */
6096FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qq)
6097{
6098 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6099 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhwd_u64);
6100}
6101
6102
6103/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
6104FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
6105{
6106 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6107 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhwd_u128);
6109}
6110
6111
6112/* Opcode 0xf3 0x0f 0x69 - invalid */
6113
6114
6115/** Opcode 0x0f 0x6a - punpckhdq Pq, Qq
6116 * @note Intel and AMD both use Qd for the second parameter; however, they
6117 * both list it as an mmX/mem64 operand and Intel describes it as being
6118 * loaded as a qword, so it should be Qq, shouldn't it? */
6119FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qq)
6120{
6121 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6122 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhdq_u64);
6123}
6124
6125
6126/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
6127FNIEMOP_DEF(iemOp_punpckhdq_Vx_Wx)
6128{
6129 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6130 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhdq_u128);
6131}
6132
6133
6134/* Opcode 0xf3 0x0f 0x6a - invalid */
6135
6136
6137/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
6138FNIEMOP_DEF(iemOp_packssdw_Pq_Qd)
6139{
6140 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6141 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packssdw_u64);
6142}
6143
6144
6145/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
6146FNIEMOP_DEF(iemOp_packssdw_Vx_Wx)
6147{
6148 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6149 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packssdw_u128);
6150}
6151
6152
6153/* Opcode 0xf3 0x0f 0x6b - invalid */
6154
6155
6156/* Opcode 0x0f 0x6c - invalid */
6157
6158
6159/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
6160FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
6161{
6162 IEMOP_MNEMONIC2(RM, PUNPCKLQDQ, punpcklqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6163 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklqdq_u128);
6164}
6165
6166
6167/* Opcode 0xf3 0x0f 0x6c - invalid */
6168/* Opcode 0xf2 0x0f 0x6c - invalid */
6169
6170
6171/* Opcode 0x0f 0x6d - invalid */
6172
6173
6174/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
6175FNIEMOP_DEF(iemOp_punpckhqdq_Vx_Wx)
6176{
6177 IEMOP_MNEMONIC2(RM, PUNPCKHQDQ, punpckhqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6178 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhqdq_u128);
6179}
6180
6181
6182/* Opcode 0xf3 0x0f 0x6d - invalid */
6183
6184
6185FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
6186{
6187 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6188 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6189 {
6190 /**
6191 * @opcode 0x6e
6192 * @opcodesub rex.w=1
6193 * @oppfx none
6194 * @opcpuid mmx
6195 * @opgroup og_mmx_datamove
6196 * @opxcpttype 5
6197 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
6198 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
6199 */
6200 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
6201 if (IEM_IS_MODRM_REG_MODE(bRm))
6202 {
6203 /* MMX, greg64 */
6204 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT);
6205 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6206 IEM_MC_LOCAL(uint64_t, u64Tmp);
6207
6208 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6209 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6210 IEM_MC_FPU_TO_MMX_MODE();
6211
6212 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6213 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6214
6215 IEM_MC_ADVANCE_RIP_AND_FINISH();
6216 IEM_MC_END();
6217 }
6218 else
6219 {
6220 /* MMX, [mem64] */
6221 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT);
6222 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6223 IEM_MC_LOCAL(uint64_t, u64Tmp);
6224
6225 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6226 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6227 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6228 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6229 IEM_MC_FPU_TO_MMX_MODE();
6230
6231 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6232 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6233
6234 IEM_MC_ADVANCE_RIP_AND_FINISH();
6235 IEM_MC_END();
6236 }
6237 }
6238 else
6239 {
6240 /**
6241 * @opdone
6242 * @opcode 0x6e
6243 * @opcodesub rex.w=0
6244 * @oppfx none
6245 * @opcpuid mmx
6246 * @opgroup og_mmx_datamove
6247 * @opxcpttype 5
6248 * @opfunction iemOp_movd_q_Pd_Ey
6249 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6250 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6251 */
6252 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
6253 if (IEM_IS_MODRM_REG_MODE(bRm))
6254 {
6255 /* MMX, greg32 */
6256 IEM_MC_BEGIN(0, 1, 0);
6257 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6258 IEM_MC_LOCAL(uint32_t, u32Tmp);
6259
6260 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6261 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6262 IEM_MC_FPU_TO_MMX_MODE();
6263
6264 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6265 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
6266
6267 IEM_MC_ADVANCE_RIP_AND_FINISH();
6268 IEM_MC_END();
6269 }
6270 else
6271 {
6272 /* MMX, [mem32] */
6273 IEM_MC_BEGIN(0, 2, 0);
6274 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6275 IEM_MC_LOCAL(uint32_t, u32Tmp);
6276
6277 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6278 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6279 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6280 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6281 IEM_MC_FPU_TO_MMX_MODE();
6282
6283 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6284 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
6285
6286 IEM_MC_ADVANCE_RIP_AND_FINISH();
6287 IEM_MC_END();
6288 }
6289 }
6290}
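
/* Note: REX.W selects between the two forms above: with it the full 64-bit
 * GPR/memory operand is moved, without it the 32-bit source is zero-extended
 * into the 64-bit MMX register (IEM_MC_STORE_MREG_U32_ZX_U64). */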
6291
6292FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
6293{
6294 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6295 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6296 {
6297 /**
6298 * @opcode 0x6e
6299 * @opcodesub rex.w=1
6300 * @oppfx 0x66
6301 * @opcpuid sse2
6302 * @opgroup og_sse2_simdint_datamove
6303 * @opxcpttype 5
6304 * @optest 64-bit / op1=1 op2=2 -> op1=2
6305 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
6306 */
6307 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
6308 if (IEM_IS_MODRM_REG_MODE(bRm))
6309 {
6310 /* XMM, greg64 */
6311 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT);
6312 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6313 IEM_MC_LOCAL(uint64_t, u64Tmp);
6314
6315 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6316 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6317
6318 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6319 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
6320
6321 IEM_MC_ADVANCE_RIP_AND_FINISH();
6322 IEM_MC_END();
6323 }
6324 else
6325 {
6326 /* XMM, [mem64] */
6327 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT);
6328 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6329 IEM_MC_LOCAL(uint64_t, u64Tmp);
6330
6331 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6332 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6333 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6334 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6335
6336 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6337 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
6338
6339 IEM_MC_ADVANCE_RIP_AND_FINISH();
6340 IEM_MC_END();
6341 }
6342 }
6343 else
6344 {
6345 /**
6346 * @opdone
6347 * @opcode 0x6e
6348 * @opcodesub rex.w=0
6349 * @oppfx 0x66
6350 * @opcpuid sse2
6351 * @opgroup og_sse2_simdint_datamove
6352 * @opxcpttype 5
6353 * @opfunction iemOp_movd_q_Vy_Ey
6354 * @optest op1=1 op2=2 -> op1=2
6355 * @optest op1=0 op2=-42 -> op1=-42
6356 */
6357 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
6358 if (IEM_IS_MODRM_REG_MODE(bRm))
6359 {
6360 /* XMM, greg32 */
6361 IEM_MC_BEGIN(0, 1, 0);
6362 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6363 IEM_MC_LOCAL(uint32_t, u32Tmp);
6364
6365 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6366 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6367
6368 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6369 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
6370
6371 IEM_MC_ADVANCE_RIP_AND_FINISH();
6372 IEM_MC_END();
6373 }
6374 else
6375 {
6376 /* XMM, [mem32] */
6377 IEM_MC_BEGIN(0, 2, 0);
6378 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6379 IEM_MC_LOCAL(uint32_t, u32Tmp);
6380
6381 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6382 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6383 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6384 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6385
6386 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6387 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
6388
6389 IEM_MC_ADVANCE_RIP_AND_FINISH();
6390 IEM_MC_END();
6391 }
6392 }
6393}
6394
6395/* Opcode 0xf3 0x0f 0x6e - invalid */
6396
6397
6398/**
6399 * @opcode 0x6f
6400 * @oppfx none
6401 * @opcpuid mmx
6402 * @opgroup og_mmx_datamove
6403 * @opxcpttype 5
6404 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6405 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6406 */
6407FNIEMOP_DEF(iemOp_movq_Pq_Qq)
6408{
6409 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6410 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6411 if (IEM_IS_MODRM_REG_MODE(bRm))
6412 {
6413 /*
6414 * Register, register.
6415 */
6416 IEM_MC_BEGIN(0, 1, 0);
6417 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6418 IEM_MC_LOCAL(uint64_t, u64Tmp);
6419
6420 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6421 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6422 IEM_MC_FPU_TO_MMX_MODE();
6423
6424 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_RM_8(bRm));
6425 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6426
6427 IEM_MC_ADVANCE_RIP_AND_FINISH();
6428 IEM_MC_END();
6429 }
6430 else
6431 {
6432 /*
6433 * Register, memory.
6434 */
6435 IEM_MC_BEGIN(0, 2, 0);
6436 IEM_MC_LOCAL(uint64_t, u64Tmp);
6437 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6438
6439 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6440 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6441 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6442 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6443 IEM_MC_FPU_TO_MMX_MODE();
6444
6445 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6446 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6447
6448 IEM_MC_ADVANCE_RIP_AND_FINISH();
6449 IEM_MC_END();
6450 }
6451}
6452
6453/**
6454 * @opcode 0x6f
6455 * @oppfx 0x66
6456 * @opcpuid sse2
6457 * @opgroup og_sse2_simdint_datamove
6458 * @opxcpttype 1
6459 * @optest op1=1 op2=2 -> op1=2
6460 * @optest op1=0 op2=-42 -> op1=-42
6461 */
6462FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
6463{
6464 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6465 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6466 if (IEM_IS_MODRM_REG_MODE(bRm))
6467 {
6468 /*
6469 * Register, register.
6470 */
6471 IEM_MC_BEGIN(0, 0, 0);
6472 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6473
6474 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6475 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6476
6477 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6478 IEM_GET_MODRM_RM(pVCpu, bRm));
6479 IEM_MC_ADVANCE_RIP_AND_FINISH();
6480 IEM_MC_END();
6481 }
6482 else
6483 {
6484 /*
6485 * Register, memory.
6486 */
6487 IEM_MC_BEGIN(0, 2, 0);
6488 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6489 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6490
6491 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6492 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6493 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6494 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6495
6496 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6497 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6498
6499 IEM_MC_ADVANCE_RIP_AND_FINISH();
6500 IEM_MC_END();
6501 }
6502}
6503
6504/**
6505 * @opcode 0x6f
6506 * @oppfx 0xf3
6507 * @opcpuid sse2
6508 * @opgroup og_sse2_simdint_datamove
6509 * @opxcpttype 4UA
6510 * @optest op1=1 op2=2 -> op1=2
6511 * @optest op1=0 op2=-42 -> op1=-42
6512 */
6513FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
6514{
6515 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6516 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6517 if (IEM_IS_MODRM_REG_MODE(bRm))
6518 {
6519 /*
6520 * Register, register.
6521 */
6522 IEM_MC_BEGIN(0, 0, 0);
6523 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6524 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6525 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6526 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6527 IEM_GET_MODRM_RM(pVCpu, bRm));
6528 IEM_MC_ADVANCE_RIP_AND_FINISH();
6529 IEM_MC_END();
6530 }
6531 else
6532 {
6533 /*
6534 * Register, memory.
6535 */
6536 IEM_MC_BEGIN(0, 2, 0);
6537 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6538 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6539
6540 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6541 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6542 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6543 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6544 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6545 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6546
6547 IEM_MC_ADVANCE_RIP_AND_FINISH();
6548 IEM_MC_END();
6549 }
6550}
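
/* Note: the only difference from the movdqa handler above is the unaligned
 * memory fetch (IEM_MC_FETCH_MEM_U128 vs IEM_MC_FETCH_MEM_U128_ALIGN_SSE);
 * the register-to-register form is identical. */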
6551
6552
6553/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
6554FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
6555{
6556 IEMOP_MNEMONIC3(RMI, PSHUFW, pshufw, Pq, Qq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6557 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6558 if (IEM_IS_MODRM_REG_MODE(bRm))
6559 {
6560 /*
6561 * Register, register.
6562 */
6563 IEM_MC_BEGIN(3, 0, 0);
6564 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6565 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
6566 IEM_MC_ARG(uint64_t *, pDst, 0);
6567 IEM_MC_ARG(uint64_t const *, pSrc, 1);
6568 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6569 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6570 IEM_MC_PREPARE_FPU_USAGE();
6571 IEM_MC_FPU_TO_MMX_MODE();
6572
6573 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6574 IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
6575 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
6576 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6577
6578 IEM_MC_ADVANCE_RIP_AND_FINISH();
6579 IEM_MC_END();
6580 }
6581 else
6582 {
6583 /*
6584 * Register, memory.
6585 */
6586 IEM_MC_BEGIN(3, 2, 0);
6587 IEM_MC_ARG(uint64_t *, pDst, 0);
6588 IEM_MC_LOCAL(uint64_t, uSrc);
6589 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
6590 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6591
6592 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
6593 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6594 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6595 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
6596 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6597 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6598
6599 IEM_MC_PREPARE_FPU_USAGE();
6600 IEM_MC_FPU_TO_MMX_MODE();
6601
6602 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6603 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
6604 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6605
6606 IEM_MC_ADVANCE_RIP_AND_FINISH();
6607 IEM_MC_END();
6608 }
6609}
6610
6611
6612/**
6613 * Common worker for SSE2 instructions on the forms:
6614 * pshufd xmm1, xmm2/mem128, imm8
6615 * pshufhw xmm1, xmm2/mem128, imm8
6616 * pshuflw xmm1, xmm2/mem128, imm8
6617 *
6618 * Proper alignment of the 128-bit operand is enforced.
6619 * Exceptions type 4. SSE2 cpuid checks.
6620 */
6621FNIEMOP_DEF_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnWorker)
6622{
6623 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6624 if (IEM_IS_MODRM_REG_MODE(bRm))
6625 {
6626 /*
6627 * Register, register.
6628 */
6629 IEM_MC_BEGIN(3, 0, 0);
6630 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6631 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6632 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6633 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
6634 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6635 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6636 IEM_MC_PREPARE_SSE_USAGE();
6637 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6638 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
6639 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);
6640 IEM_MC_ADVANCE_RIP_AND_FINISH();
6641 IEM_MC_END();
6642 }
6643 else
6644 {
6645 /*
6646 * Register, memory.
6647 */
6648 IEM_MC_BEGIN(3, 2, 0);
6649 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6650 IEM_MC_LOCAL(RTUINT128U, uSrc);
6651 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
6652 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6653
6654 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
6655 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6656 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6657 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6658 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6659
6660 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6661 IEM_MC_PREPARE_SSE_USAGE();
6662 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6663 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);
6664
6665 IEM_MC_ADVANCE_RIP_AND_FINISH();
6666 IEM_MC_END();
6667 }
6668}
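
/*
 * Reference semantics (illustrative sketch, pshufd case): every destination
 * dword is picked by a 2-bit field of the immediate:
 *      for (unsigned i = 0; i < 4; i++)
 *          puDst->au32[i] = uSrc.au32[(bImm >> (i * 2)) & 3];
 * pshufhw and pshuflw shuffle only the high/low four words the same way.
 */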
6669
6670
6671/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
6672FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
6673{
6674 IEMOP_MNEMONIC3(RMI, PSHUFD, pshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6675 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128);
6676}
6677
6678
6679/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
6680FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
6681{
6682 IEMOP_MNEMONIC3(RMI, PSHUFHW, pshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6683 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128);
6684}
6685
6686
6687/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
6688FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
6689{
6690 IEMOP_MNEMONIC3(RMI, PSHUFLW, pshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6691 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128);
6692}
6693
6694
6695/**
6696 * Common worker for MMX instructions of the form:
6697 * psrlw mm, imm8
6698 * psraw mm, imm8
6699 * psllw mm, imm8
6700 * psrld mm, imm8
6701 * psrad mm, imm8
6702 * pslld mm, imm8
6703 * psrlq mm, imm8
6704 * psllq mm, imm8
6705 *
6706 */
6707FNIEMOP_DEF_2(iemOpCommonMmx_Shift_Imm, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHIFTU64, pfnU64)
6708{
6709 if (IEM_IS_MODRM_REG_MODE(bRm))
6710 {
6711 /*
6712 * Register, immediate.
6713 */
6714 IEM_MC_BEGIN(2, 0, 0);
6715 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6716 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6717 IEM_MC_ARG(uint64_t *, pDst, 0);
6718 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6719 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6720 IEM_MC_PREPARE_FPU_USAGE();
6721 IEM_MC_FPU_TO_MMX_MODE();
6722
6723 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_RM_8(bRm));
6724 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, bShiftArg);
6725 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6726
6727 IEM_MC_ADVANCE_RIP_AND_FINISH();
6728 IEM_MC_END();
6729 }
6730 else
6731 {
6732 /*
6733 * Register, memory not supported.
6734 */
6735 /// @todo Caller already enforced register mode?!
6736 AssertFailedReturn(VINF_SUCCESS);
6737 }
6738}
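
/*
 * Reference semantics (illustrative sketch, psrlw case): each word element
 * is shifted logically right by the immediate; counts above 15 zero the
 * element:
 *      for (unsigned i = 0; i < 4; i++)
 *          uDst.au16[i] = bImm <= 15 ? uDst.au16[i] >> bImm : 0;
 */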
6739
6740
6741/**
6742 * Common worker for SSE2 instructions of the form:
6743 * psrlw xmm, imm8
6744 * psraw xmm, imm8
6745 * psllw xmm, imm8
6746 * psrld xmm, imm8
6747 * psrad xmm, imm8
6748 * pslld xmm, imm8
6749 * psrlq xmm, imm8
6750 * psllq xmm, imm8
6751 *
6752 */
6753FNIEMOP_DEF_2(iemOpCommonSse2_Shift_Imm, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHIFTU128, pfnU128)
6754{
6755 if (IEM_IS_MODRM_REG_MODE(bRm))
6756 {
6757 /*
6758 * Register, immediate.
6759 */
6760 IEM_MC_BEGIN(2, 0, 0);
6761 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6762 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6763 IEM_MC_ARG(PRTUINT128U, pDst, 0);
6764 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6765 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6766 IEM_MC_PREPARE_SSE_USAGE();
6767 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_RM(pVCpu, bRm));
6768 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, bShiftArg);
6769 IEM_MC_ADVANCE_RIP_AND_FINISH();
6770 IEM_MC_END();
6771 }
6772 else
6773 {
6774 /*
6775 * Register, memory not supported.
6776 */
6777 /// @todo Caller already enforced register mode?!
6778 AssertFailedReturn(VINF_SUCCESS);
6779 }
6780}
6781
6782
6783/** Opcode 0x0f 0x71 11/2 - psrlw Nq, Ib */
6784FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Nq_Ib)
6785{
6786// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6787 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlw_imm_u64);
6788}
6789
6790
6791/** Opcode 0x66 0x0f 0x71 11/2. */
6792FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Ux_Ib)
6793{
6794// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6795 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlw_imm_u128);
6796}
6797
6798
6799/** Opcode 0x0f 0x71 11/4. */
6800FNIEMOPRM_DEF(iemOp_Grp12_psraw_Nq_Ib)
6801{
6802// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6803 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psraw_imm_u64);
6804}
6805
6806
6807/** Opcode 0x66 0x0f 0x71 11/4. */
6808FNIEMOPRM_DEF(iemOp_Grp12_psraw_Ux_Ib)
6809{
6810// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6811 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psraw_imm_u128);
6812}
6813
6814
6815/** Opcode 0x0f 0x71 11/6. */
6816FNIEMOPRM_DEF(iemOp_Grp12_psllw_Nq_Ib)
6817{
6818// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6819 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllw_imm_u64);
6820}
6821
6822
6823/** Opcode 0x66 0x0f 0x71 11/6. */
6824FNIEMOPRM_DEF(iemOp_Grp12_psllw_Ux_Ib)
6825{
6826// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6827 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllw_imm_u128);
6828}
6829
6830
6831/**
6832 * Group 12 jump table for register variant.
6833 */
6834IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
6835{
6836 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6837 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6838 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6839 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6840 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6841 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6842 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6843 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6844};
6845AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
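
/* Dispatch sketch: each /reg row in the table above holds one entry per
 * mandatory-prefix column (none, 0x66, 0xf3, 0xf2), hence the
 * reg * 4 + idxPrefix lookup in iemOp_Grp12 below. */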
6846
6847
6848/** Opcode 0x0f 0x71. */
6849FNIEMOP_DEF(iemOp_Grp12)
6850{
6851 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6852 if (IEM_IS_MODRM_REG_MODE(bRm))
6853 /* register, register */
6854 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6855 + pVCpu->iem.s.idxPrefix], bRm);
6856 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6857}
6858
6859
6860/** Opcode 0x0f 0x72 11/2. */
6861FNIEMOPRM_DEF(iemOp_Grp13_psrld_Nq_Ib)
6862{
6863// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6864 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrld_imm_u64);
6865}
6866
6867
6868/** Opcode 0x66 0x0f 0x72 11/2. */
6869FNIEMOPRM_DEF(iemOp_Grp13_psrld_Ux_Ib)
6870{
6871// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6872 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrld_imm_u128);
6873}
6874
6875
6876/** Opcode 0x0f 0x72 11/4. */
6877FNIEMOPRM_DEF(iemOp_Grp13_psrad_Nq_Ib)
6878{
6879// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6880 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrad_imm_u64);
6881}
6882
6883
6884/** Opcode 0x66 0x0f 0x72 11/4. */
6885FNIEMOPRM_DEF(iemOp_Grp13_psrad_Ux_Ib)
6886{
6887// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6888 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrad_imm_u128);
6889}
6890
6891
6892/** Opcode 0x0f 0x72 11/6. */
6893FNIEMOPRM_DEF(iemOp_Grp13_pslld_Nq_Ib)
6894{
6895// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6896 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_pslld_imm_u64);
6897}
6898
6899/** Opcode 0x66 0x0f 0x72 11/6. */
6900FNIEMOPRM_DEF(iemOp_Grp13_pslld_Ux_Ib)
6901{
6902// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6903 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslld_imm_u128);
6904}
6905
6906
6907/**
6908 * Group 13 jump table for register variant.
6909 */
6910IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
6911{
6912 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6913 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6914 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6915 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6916 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6917 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6918 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6919 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6920};
6921AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
6922
6923/** Opcode 0x0f 0x72. */
6924FNIEMOP_DEF(iemOp_Grp13)
6925{
6926 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6927 if (IEM_IS_MODRM_REG_MODE(bRm))
6928 /* register, register */
6929 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6930 + pVCpu->iem.s.idxPrefix], bRm);
6931 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6932}
6933
6934
6935/** Opcode 0x0f 0x73 11/2. */
6936FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Nq_Ib)
6937{
6938// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6939 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlq_imm_u64);
6940}
6941
6942
6943/** Opcode 0x66 0x0f 0x73 11/2. */
6944FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Ux_Ib)
6945{
6946// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6947 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlq_imm_u128);
6948}
6949
6950
6951/** Opcode 0x66 0x0f 0x73 11/3. */
6952FNIEMOPRM_DEF(iemOp_Grp14_psrldq_Ux_Ib)
6953{
6954// IEMOP_MNEMONIC2(RI, PSRLDQ, psrldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6955 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrldq_imm_u128);
6956}
6957
6958
6959/** Opcode 0x0f 0x73 11/6. */
6960FNIEMOPRM_DEF(iemOp_Grp14_psllq_Nq_Ib)
6961{
6962// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6963 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllq_imm_u64);
6964}
6965
6966
6967/** Opcode 0x66 0x0f 0x73 11/6. */
6968FNIEMOPRM_DEF(iemOp_Grp14_psllq_Ux_Ib)
6969{
6970// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6971 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllq_imm_u128);
6972}
6973
6974
6975/** Opcode 0x66 0x0f 0x73 11/7. */
6976FNIEMOPRM_DEF(iemOp_Grp14_pslldq_Ux_Ib)
6977{
6978// IEMOP_MNEMONIC2(RI, PSLLDQ, pslldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6979 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslldq_imm_u128);
6980}
6981

6982/**
6983 * Group 14 jump table for register variant.
6984 */
6985IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
6986{
6987 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6988 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6989 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6990 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6991 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6992 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6993 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6994 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6995};
6996AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
6997
6998
6999/** Opcode 0x0f 0x73. */
7000FNIEMOP_DEF(iemOp_Grp14)
7001{
7002 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7003 if (IEM_IS_MODRM_REG_MODE(bRm))
7004 /* register, register */
7005 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
7006 + pVCpu->iem.s.idxPrefix], bRm);
7007 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
7008}
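/*
 * Note: unlike the other Group 14 shifts, psrldq (/3) and pslldq (/7) shift
 * the whole register by bytes rather than bits and exist only in the 0x66
 * (SSE2) form, which is why their no-prefix MMX column above is invalid.
 */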
7009
7010
7011/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
7012FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
7013{
7014 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7015 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqb_u64);
7016}
7017
7018
7019/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
7020FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
7021{
7022 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7023 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqb_u128);
7024}
7025
7026
7027/* Opcode 0xf3 0x0f 0x74 - invalid */
7028/* Opcode 0xf2 0x0f 0x74 - invalid */
7029
7030
7031/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
7032FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
7033{
7034 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7035 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqw_u64);
7036}
7037
7038
7039/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
7040FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
7041{
7042 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7043 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqw_u128);
7044}
7045
7046
7047/* Opcode 0xf3 0x0f 0x75 - invalid */
7048/* Opcode 0xf2 0x0f 0x75 - invalid */
7049
7050
7051/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
7052FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
7053{
7054 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7055 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqd_u64);
7056}
7057
7058
7059/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
7060FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
7061{
7062 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7063 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqd_u128);
7064}
7065
7066
7067/* Opcode 0xf3 0x0f 0x76 - invalid */
7068/* Opcode 0xf2 0x0f 0x76 - invalid */
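/*
 * Note: the pcmpeqb/pcmpeqw/pcmpeqd family above produces element masks, not
 * EFLAGS: each destination element becomes all ones where the corresponding
 * source elements are equal and all zeroes where they differ.  E.g. pcmpeqd
 * on {1,2,3,4} and {1,0,3,0} yields {0xffffffff, 0, 0xffffffff, 0}.
 */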
7069
7070
7071/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
7072FNIEMOP_DEF(iemOp_emms)
7073{
7074 IEMOP_MNEMONIC(emms, "emms");
7075 IEM_MC_BEGIN(0, 0, 0);
7076 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7077 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7078 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7079 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7080 IEM_MC_FPU_FROM_MMX_MODE();
7081 IEM_MC_ADVANCE_RIP_AND_FINISH();
7082 IEM_MC_END();
7083}
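/*
 * Note: EMMS is the bookend for a block of MMX code; IEM_MC_FPU_FROM_MMX_MODE
 * marks all eight x87 registers empty again (tag word all-empty), so that
 * following x87 code sees a clean register stack instead of the aliased MMX
 * state.
 */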
7084
7085/* Opcode 0x66 0x0f 0x77 - invalid */
7086/* Opcode 0xf3 0x0f 0x77 - invalid */
7087/* Opcode 0xf2 0x0f 0x77 - invalid */
7088
7089/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
7090#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
7091FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
7092{
7093 IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
7094 IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVDiag_Vmread);
7095 IEMOP_HLP_VMX_INSTR("vmread", kVmxVDiag_Vmread);
7096 IEMMODE const enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : IEMMODE_32BIT;
7097
7098 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7099 if (IEM_IS_MODRM_REG_MODE(bRm))
7100 {
7101 /*
7102 * Register, register.
7103 */
7104 if (enmEffOpSize == IEMMODE_64BIT)
7105 {
7106 IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT);
7107 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7108 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7109 IEM_MC_ARG(uint64_t, u64Enc, 1);
7110 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7111 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7112 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmread_reg64, pu64Dst, u64Enc);
7113 IEM_MC_END();
7114 }
7115 else
7116 {
7117 IEM_MC_BEGIN(2, 0, 0);
7118 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7119 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7120 IEM_MC_ARG(uint32_t, u32Enc, 1);
7121 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7122 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7123 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmread_reg32, pu64Dst, u32Enc);
7124 IEM_MC_END();
7125 }
7126 }
7127 else
7128 {
7129 /*
7130 * Memory, register.
7131 */
7132 if (enmEffOpSize == IEMMODE_64BIT)
7133 {
7134 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT);
7135 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7136 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7137 IEM_MC_ARG(uint64_t, u64Enc, 2);
7138 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7139 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7140 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7141 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7142 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
7143 iemCImpl_vmread_mem_reg64, iEffSeg, GCPtrVal, u64Enc);
7144 IEM_MC_END();
7145 }
7146 else
7147 {
7148 IEM_MC_BEGIN(3, 0, 0);
7149 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7150 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7151 IEM_MC_ARG(uint32_t, u32Enc, 2);
7152 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7153 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7154 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7155 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7156 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
7157 iemCImpl_vmread_mem_reg32, iEffSeg, GCPtrVal, u32Enc);
7158 IEM_MC_END();
7159 }
7160 }
7161}
7162#else
7163FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
7164#endif
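/*
 * Note: for VMREAD the ModR/M reg field carries the VMCS field encoding
 * (u64Enc/u32Enc above) while r/m receives the value, i.e. Ey is the
 * destination; VMWRITE below mirrors this, with r/m supplying the value to
 * write.
 */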
7165
7166/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
7167FNIEMOP_STUB(iemOp_AmdGrp17);
7168/* Opcode 0xf3 0x0f 0x78 - invalid */
7169/* Opcode 0xf2 0x0f 0x78 - invalid */
7170
7171/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
7172#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
7173FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
7174{
7175 IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
7176 IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVDiag_Vmwrite);
7177 IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVDiag_Vmwrite);
7178 IEMMODE const enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : IEMMODE_32BIT;
7179
7180 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7181 if (IEM_IS_MODRM_REG_MODE(bRm))
7182 {
7183 /*
7184 * Register, register.
7185 */
7186 if (enmEffOpSize == IEMMODE_64BIT)
7187 {
7188 IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT);
7189 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7190 IEM_MC_ARG(uint64_t, u64Val, 0);
7191 IEM_MC_ARG(uint64_t, u64Enc, 1);
7192 IEM_MC_FETCH_GREG_U64(u64Val, IEM_GET_MODRM_RM(pVCpu, bRm));
7193 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7194 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmwrite_reg, u64Val, u64Enc);
7195 IEM_MC_END();
7196 }
7197 else
7198 {
7199 IEM_MC_BEGIN(2, 0, 0);
7200 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7201 IEM_MC_ARG(uint32_t, u32Val, 0);
7202 IEM_MC_ARG(uint32_t, u32Enc, 1);
7203 IEM_MC_FETCH_GREG_U32(u32Val, IEM_GET_MODRM_RM(pVCpu, bRm));
7204 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7205 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmwrite_reg, u32Val, u32Enc);
7206 IEM_MC_END();
7207 }
7208 }
7209 else
7210 {
7211 /*
7212 * Register, memory.
7213 */
7214 if (enmEffOpSize == IEMMODE_64BIT)
7215 {
7216 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT);
7217 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7218 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7219 IEM_MC_ARG(uint64_t, u64Enc, 2);
7220 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7221 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7222 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7223 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7224 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
7225 iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u64Enc);
7226 IEM_MC_END();
7227 }
7228 else
7229 {
7230 IEM_MC_BEGIN(3, 0, 0);
7231 IEM_MC_ARG(uint8_t, iEffSeg, 0);
7232 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7233 IEM_MC_ARG(uint32_t, u32Enc, 2);
7234 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7235 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7236 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7237 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7238 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
7239 iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u32Enc);
7240 IEM_MC_END();
7241 }
7242 }
7243}
7244#else
7245FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
7246#endif
7247/* Opcode 0x66 0x0f 0x79 - invalid */
7248/* Opcode 0xf3 0x0f 0x79 - invalid */
7249/* Opcode 0xf2 0x0f 0x79 - invalid */
7250
7251/* Opcode 0x0f 0x7a - invalid */
7252/* Opcode 0x66 0x0f 0x7a - invalid */
7253/* Opcode 0xf3 0x0f 0x7a - invalid */
7254/* Opcode 0xf2 0x0f 0x7a - invalid */
7255
7256/* Opcode 0x0f 0x7b - invalid */
7257/* Opcode 0x66 0x0f 0x7b - invalid */
7258/* Opcode 0xf3 0x0f 0x7b - invalid */
7259/* Opcode 0xf2 0x0f 0x7b - invalid */
7260
7261/* Opcode 0x0f 0x7c - invalid */
7262
7263
7264/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
7265FNIEMOP_DEF(iemOp_haddpd_Vpd_Wpd)
7266{
7267 IEMOP_MNEMONIC2(RM, HADDPD, haddpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
7268 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddpd_u128);
7269}
7270
7271
7272/* Opcode 0xf3 0x0f 0x7c - invalid */
7273
7274
7275/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
7276FNIEMOP_DEF(iemOp_haddps_Vps_Wps)
7277{
7278 IEMOP_MNEMONIC2(RM, HADDPS, haddps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
7279 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddps_u128);
7280}
7281
7282
7283/* Opcode 0x0f 0x7d - invalid */
7284
7285
7286/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
7287FNIEMOP_DEF(iemOp_hsubpd_Vpd_Wpd)
7288{
7289 IEMOP_MNEMONIC2(RM, HSUBPD, hsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
7290 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubpd_u128);
7291}
7292
7293
7294/* Opcode 0xf3 0x0f 0x7d - invalid */
7295
7296
7297/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
7298FNIEMOP_DEF(iemOp_hsubps_Vps_Wps)
7299{
7300 IEMOP_MNEMONIC2(RM, HSUBPS, hsubps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
7301 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubps_u128);
7302}
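/*
 * Note: the SSE3 horizontal ops above pair up neighbouring lanes within each
 * source: haddpd computes dst[0] = src1[0] + src1[1] and
 * dst[1] = src2[0] + src2[1], hsubpd does the same with subtraction (lane 0
 * minus lane 1), and the ps forms apply the pattern to four single-precision
 * lane pairs.
 */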
7303
7304
7305/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
7306FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
7307{
7308 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7309 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7310 {
7311 /**
7312 * @opcode 0x7e
7313 * @opcodesub rex.w=1
7314 * @oppfx none
7315 * @opcpuid mmx
7316 * @opgroup og_mmx_datamove
7317 * @opxcpttype 5
7318 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
7319 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
7320 */
7321 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
7322 if (IEM_IS_MODRM_REG_MODE(bRm))
7323 {
7324 /* greg64, MMX */
7325 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT);
7326 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7327 IEM_MC_LOCAL(uint64_t, u64Tmp);
7328
7329 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7330 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7331 IEM_MC_FPU_TO_MMX_MODE();
7332
7333 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7334 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
7335
7336 IEM_MC_ADVANCE_RIP_AND_FINISH();
7337 IEM_MC_END();
7338 }
7339 else
7340 {
7341 /* [mem64], MMX */
7342 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT);
7343 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7344 IEM_MC_LOCAL(uint64_t, u64Tmp);
7345
7346 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7347 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7348 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7349 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7350 IEM_MC_FPU_TO_MMX_MODE();
7351
7352 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7353 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7354
7355 IEM_MC_ADVANCE_RIP_AND_FINISH();
7356 IEM_MC_END();
7357 }
7358 }
7359 else
7360 {
7361 /**
7362 * @opdone
7363 * @opcode 0x7e
7364 * @opcodesub rex.w=0
7365 * @oppfx none
7366 * @opcpuid mmx
7367 * @opgroup og_mmx_datamove
7368 * @opxcpttype 5
7369 * @opfunction iemOp_movd_q_Ey_Pd
7370 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
7371 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
7372 */
7373 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
7374 if (IEM_IS_MODRM_REG_MODE(bRm))
7375 {
7376 /* greg32, MMX */
7377 IEM_MC_BEGIN(0, 1, 0);
7378 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7379 IEM_MC_LOCAL(uint32_t, u32Tmp);
7380
7381 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7382 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7383 IEM_MC_FPU_TO_MMX_MODE();
7384
7385 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
7386 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
7387
7388 IEM_MC_ADVANCE_RIP_AND_FINISH();
7389 IEM_MC_END();
7390 }
7391 else
7392 {
7393 /* [mem32], MMX */
7394 IEM_MC_BEGIN(0, 2, 0);
7395 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7396 IEM_MC_LOCAL(uint32_t, u32Tmp);
7397
7398 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7399 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7400 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7401 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7402 IEM_MC_FPU_TO_MMX_MODE();
7403
7404 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
7405 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
7406
7407 IEM_MC_ADVANCE_RIP_AND_FINISH();
7408 IEM_MC_END();
7409 }
7410 }
7411}
7412
7413
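/** Opcode 0x66 0x0f 0x7e - movd_q Ey, Vy */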
7414FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
7415{
7416 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7417 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7418 {
7419 /**
7420 * @opcode 0x7e
7421 * @opcodesub rex.w=1
7422 * @oppfx 0x66
7423 * @opcpuid sse2
7424 * @opgroup og_sse2_simdint_datamove
7425 * @opxcpttype 5
7426 * @optest 64-bit / op1=1 op2=2 -> op1=2
7427 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
7428 */
7429 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
7430 if (IEM_IS_MODRM_REG_MODE(bRm))
7431 {
7432 /* greg64, XMM */
7433 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT);
7434 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7435 IEM_MC_LOCAL(uint64_t, u64Tmp);
7436
7437 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7438 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7439
7440 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
7441 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
7442
7443 IEM_MC_ADVANCE_RIP_AND_FINISH();
7444 IEM_MC_END();
7445 }
7446 else
7447 {
7448 /* [mem64], XMM */
7449 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT);
7450 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7451 IEM_MC_LOCAL(uint64_t, u64Tmp);
7452
7453 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7454 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7455 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7456 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7457
7458 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
7459 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7460
7461 IEM_MC_ADVANCE_RIP_AND_FINISH();
7462 IEM_MC_END();
7463 }
7464 }
7465 else
7466 {
7467 /**
7468 * @opdone
7469 * @opcode 0x7e
7470 * @opcodesub rex.w=0
7471 * @oppfx 0x66
7472 * @opcpuid sse2
7473 * @opgroup og_sse2_simdint_datamove
7474 * @opxcpttype 5
7475 * @opfunction iemOp_movd_q_Ey_Vy
7476 * @optest op1=1 op2=2 -> op1=2
7477 * @optest op1=0 op2=-42 -> op1=-42
7478 */
7479 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
7480 if (IEM_IS_MODRM_REG_MODE(bRm))
7481 {
7482 /* greg32, XMM */
7483 IEM_MC_BEGIN(0, 1, 0);
7484 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7485 IEM_MC_LOCAL(uint32_t, u32Tmp);
7486
7487 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7488 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7489
7490 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
7491 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
7492
7493 IEM_MC_ADVANCE_RIP_AND_FINISH();
7494 IEM_MC_END();
7495 }
7496 else
7497 {
7498 /* [mem32], XMM */
7499 IEM_MC_BEGIN(0, 2, 0);
7500 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7501 IEM_MC_LOCAL(uint32_t, u32Tmp);
7502
7503 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7504 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7505 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7506 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7507
7508 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
7509 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
7510
7511 IEM_MC_ADVANCE_RIP_AND_FINISH();
7512 IEM_MC_END();
7513 }
7514 }
7515}
7516
7517/**
7518 * @opcode 0x7e
7519 * @oppfx 0xf3
7520 * @opcpuid sse2
7521 * @opgroup og_sse2_pcksclr_datamove
7522 * @opxcpttype none
7523 * @optest op1=1 op2=2 -> op1=2
7524 * @optest op1=0 op2=-42 -> op1=-42
7525 */
7526FNIEMOP_DEF(iemOp_movq_Vq_Wq)
7527{
7528 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7529 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7530 if (IEM_IS_MODRM_REG_MODE(bRm))
7531 {
7532 /*
7533 * XMM128, XMM64.
7534 */
7535 IEM_MC_BEGIN(0, 2, 0);
7536 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7537 IEM_MC_LOCAL(uint64_t, uSrc);
7538
7539 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7540 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7541
7542 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
7543 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7544
7545 IEM_MC_ADVANCE_RIP_AND_FINISH();
7546 IEM_MC_END();
7547 }
7548 else
7549 {
7550 /*
7551 * XMM128, [mem64].
7552 */
7553 IEM_MC_BEGIN(0, 2, 0);
7554 IEM_MC_LOCAL(uint64_t, uSrc);
7555 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7556
7557 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7558 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7559 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7560 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7561
7562 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7563 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7564
7565 IEM_MC_ADVANCE_RIP_AND_FINISH();
7566 IEM_MC_END();
7567 }
7568}
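/*
 * Note: this f3-prefixed movq always produces a full 128-bit result:
 * IEM_MC_STORE_XREG_U64_ZX_U128 writes the 64-bit source to the low qword
 * and zeroes bits 127:64, which is what separates it from the MMX movq forms.
 */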
7569
7570/* Opcode 0xf2 0x0f 0x7e - invalid */
7571
7572
7573/** Opcode 0x0f 0x7f - movq Qq, Pq */
7574FNIEMOP_DEF(iemOp_movq_Qq_Pq)
7575{
7576 IEMOP_MNEMONIC2(MR, MOVQ, movq, Qq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_IGNORES_REXW);
7577 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7578 if (IEM_IS_MODRM_REG_MODE(bRm))
7579 {
7580 /*
7581 * MMX, MMX.
7582 */
7583 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
7584 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
7585 IEM_MC_BEGIN(0, 1, 0);
7586 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7587 IEM_MC_LOCAL(uint64_t, u64Tmp);
7588 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7589 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7590 IEM_MC_FPU_TO_MMX_MODE();
7591
7592 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7593 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_RM_8(bRm), u64Tmp);
7594
7595 IEM_MC_ADVANCE_RIP_AND_FINISH();
7596 IEM_MC_END();
7597 }
7598 else
7599 {
7600 /*
7601 * [mem64], MMX.
7602 */
7603 IEM_MC_BEGIN(0, 2, 0);
7604 IEM_MC_LOCAL(uint64_t, u64Tmp);
7605 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7606
7607 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7608 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7609 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7610 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7611 IEM_MC_FPU_TO_MMX_MODE();
7612
7613 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7614 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7615
7616 IEM_MC_ADVANCE_RIP_AND_FINISH();
7617 IEM_MC_END();
7618 }
7619}
7620
7621/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
7622FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
7623{
7624 IEMOP_MNEMONIC2(MR, MOVDQA, movdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7625 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7626 if (IEM_IS_MODRM_REG_MODE(bRm))
7627 {
7628 /*
7629 * XMM, XMM.
7630 */
7631 IEM_MC_BEGIN(0, 0, 0);
7632 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7633 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7634 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7635 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
7636 IEM_GET_MODRM_REG(pVCpu, bRm));
7637 IEM_MC_ADVANCE_RIP_AND_FINISH();
7638 IEM_MC_END();
7639 }
7640 else
7641 {
7642 /*
7643 * [mem128], XMM.
7644 */
7645 IEM_MC_BEGIN(0, 2, 0);
7646 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
7647 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7648
7649 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7650 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7651 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7652 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7653
7654 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7655 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
7656
7657 IEM_MC_ADVANCE_RIP_AND_FINISH();
7658 IEM_MC_END();
7659 }
7660}
7661
7662/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
7663FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
7664{
7665 IEMOP_MNEMONIC2(MR, MOVDQU, movdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7666 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7667 if (IEM_IS_MODRM_REG_MODE(bRm))
7668 {
7669 /*
7670 * XMM, XMM.
7671 */
7672 IEM_MC_BEGIN(0, 0, 0);
7673 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7674 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7675 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7676 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
7677 IEM_GET_MODRM_REG(pVCpu, bRm));
7678 IEM_MC_ADVANCE_RIP_AND_FINISH();
7679 IEM_MC_END();
7680 }
7681 else
7682 {
7683 /*
7684 * [mem128], XMM.
7685 */
7686 IEM_MC_BEGIN(0, 2, 0);
7687 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
7688 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7689
7690 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7691 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7692 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7693 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7694
7695 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7696 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
7697
7698 IEM_MC_ADVANCE_RIP_AND_FINISH();
7699 IEM_MC_END();
7700 }
7701}
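/*
 * Note: the only difference between the movdqa and movdqu stores above is
 * alignment checking: IEM_MC_STORE_MEM_U128_ALIGN_SSE faults (#GP(0)) on an
 * effective address that isn't 16-byte aligned, while movdqu uses the plain
 * unaligned IEM_MC_STORE_MEM_U128.
 */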
7702
7703/* Opcode 0xf2 0x0f 0x7f - invalid */
7704
7705
7706
7707/** Opcode 0x0f 0x80. */
7708FNIEMOP_DEF(iemOp_jo_Jv)
7709{
7710 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
7711 IEMOP_HLP_MIN_386();
7712 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7713 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7714 {
7715 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
7716 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7717 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7718 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7719 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7720 } IEM_MC_ELSE() {
7721 IEM_MC_ADVANCE_RIP_AND_FINISH();
7722 } IEM_MC_ENDIF();
7723 IEM_MC_END();
7724 }
7725 else
7726 {
7727 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
7728 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7729 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7730 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7731 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7732 } IEM_MC_ELSE() {
7733 IEM_MC_ADVANCE_RIP_AND_FINISH();
7734 } IEM_MC_ENDIF();
7735 IEM_MC_END();
7736 }
7737}
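/*
 * Note: the remaining 0x0f 0x80..0x8f Jcc handlers below all follow the jo
 * pattern above: fetch a signed 16-bit displacement when the effective
 * operand size is 16-bit and a signed 32-bit one otherwise (64-bit code
 * defaults to the wide form, and Intel ignores the operand-size prefix
 * there), with only the EFLAGS condition tested differing, e.g. jl/jnl test
 * SF != OF.
 */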
7738
7739
7740/** Opcode 0x0f 0x81. */
7741FNIEMOP_DEF(iemOp_jno_Jv)
7742{
7743 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
7744 IEMOP_HLP_MIN_386();
7745 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7746 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7747 {
7748 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
7749 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7750 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7751 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7752 IEM_MC_ADVANCE_RIP_AND_FINISH();
7753 } IEM_MC_ELSE() {
7754 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7755 } IEM_MC_ENDIF();
7756 IEM_MC_END();
7757 }
7758 else
7759 {
7760 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
7761 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7762 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7763 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7764 IEM_MC_ADVANCE_RIP_AND_FINISH();
7765 } IEM_MC_ELSE() {
7766 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7767 } IEM_MC_ENDIF();
7768 IEM_MC_END();
7769 }
7770}
7771
7772
7773/** Opcode 0x0f 0x82. */
7774FNIEMOP_DEF(iemOp_jc_Jv)
7775{
7776 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
7777 IEMOP_HLP_MIN_386();
7778 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7779 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7780 {
7781 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
7782 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7783 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7784 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7785 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7786 } IEM_MC_ELSE() {
7787 IEM_MC_ADVANCE_RIP_AND_FINISH();
7788 } IEM_MC_ENDIF();
7789 IEM_MC_END();
7790 }
7791 else
7792 {
7793 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
7794 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7795 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7796 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7797 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7798 } IEM_MC_ELSE() {
7799 IEM_MC_ADVANCE_RIP_AND_FINISH();
7800 } IEM_MC_ENDIF();
7801 IEM_MC_END();
7802 }
7803}
7804
7805
7806/** Opcode 0x0f 0x83. */
7807FNIEMOP_DEF(iemOp_jnc_Jv)
7808{
7809 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
7810 IEMOP_HLP_MIN_386();
7811 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7812 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7813 {
7814 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
7815 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7816 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7817 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7818 IEM_MC_ADVANCE_RIP_AND_FINISH();
7819 } IEM_MC_ELSE() {
7820 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7821 } IEM_MC_ENDIF();
7822 IEM_MC_END();
7823 }
7824 else
7825 {
7826 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
7827 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7828 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7829 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7830 IEM_MC_ADVANCE_RIP_AND_FINISH();
7831 } IEM_MC_ELSE() {
7832 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7833 } IEM_MC_ENDIF();
7834 IEM_MC_END();
7835 }
7836}
7837
7838
7839/** Opcode 0x0f 0x84. */
7840FNIEMOP_DEF(iemOp_je_Jv)
7841{
7842 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
7843 IEMOP_HLP_MIN_386();
7844 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7845 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7846 {
7847 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
7848 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7849 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7850 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7851 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7852 } IEM_MC_ELSE() {
7853 IEM_MC_ADVANCE_RIP_AND_FINISH();
7854 } IEM_MC_ENDIF();
7855 IEM_MC_END();
7856 }
7857 else
7858 {
7859 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
7860 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7861 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7862 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7863 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7864 } IEM_MC_ELSE() {
7865 IEM_MC_ADVANCE_RIP_AND_FINISH();
7866 } IEM_MC_ENDIF();
7867 IEM_MC_END();
7868 }
7869}
7870
7871
7872/** Opcode 0x0f 0x85. */
7873FNIEMOP_DEF(iemOp_jne_Jv)
7874{
7875 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
7876 IEMOP_HLP_MIN_386();
7877 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7878 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7879 {
7880 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
7881 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7882 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7883 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7884 IEM_MC_ADVANCE_RIP_AND_FINISH();
7885 } IEM_MC_ELSE() {
7886 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7887 } IEM_MC_ENDIF();
7888 IEM_MC_END();
7889 }
7890 else
7891 {
7892 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
7893 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7894 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7895 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7896 IEM_MC_ADVANCE_RIP_AND_FINISH();
7897 } IEM_MC_ELSE() {
7898 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7899 } IEM_MC_ENDIF();
7900 IEM_MC_END();
7901 }
7902}
7903
7904
7905/** Opcode 0x0f 0x86. */
7906FNIEMOP_DEF(iemOp_jbe_Jv)
7907{
7908 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
7909 IEMOP_HLP_MIN_386();
7910 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7911 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7912 {
7913 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
7914 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7915 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7916 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7917 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7918 } IEM_MC_ELSE() {
7919 IEM_MC_ADVANCE_RIP_AND_FINISH();
7920 } IEM_MC_ENDIF();
7921 IEM_MC_END();
7922 }
7923 else
7924 {
7925 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
7926 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7927 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7928 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7929 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7930 } IEM_MC_ELSE() {
7931 IEM_MC_ADVANCE_RIP_AND_FINISH();
7932 } IEM_MC_ENDIF();
7933 IEM_MC_END();
7934 }
7935}
7936
7937
7938/** Opcode 0x0f 0x87. */
7939FNIEMOP_DEF(iemOp_jnbe_Jv)
7940{
7941 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
7942 IEMOP_HLP_MIN_386();
7943 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7944 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7945 {
7946 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
7947 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7948 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7949 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7950 IEM_MC_ADVANCE_RIP_AND_FINISH();
7951 } IEM_MC_ELSE() {
7952 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7953 } IEM_MC_ENDIF();
7954 IEM_MC_END();
7955 }
7956 else
7957 {
7958 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
7959 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7960 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7961 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7962 IEM_MC_ADVANCE_RIP_AND_FINISH();
7963 } IEM_MC_ELSE() {
7964 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7965 } IEM_MC_ENDIF();
7966 IEM_MC_END();
7967 }
7968}
7969
7970
7971/** Opcode 0x0f 0x88. */
7972FNIEMOP_DEF(iemOp_js_Jv)
7973{
7974 IEMOP_MNEMONIC(js_Jv, "js Jv");
7975 IEMOP_HLP_MIN_386();
7976 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7977 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7978 {
7979 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
7980 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7981 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7982 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7983 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7984 } IEM_MC_ELSE() {
7985 IEM_MC_ADVANCE_RIP_AND_FINISH();
7986 } IEM_MC_ENDIF();
7987 IEM_MC_END();
7988 }
7989 else
7990 {
7991 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
7992 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7993 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7994 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7995 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7996 } IEM_MC_ELSE() {
7997 IEM_MC_ADVANCE_RIP_AND_FINISH();
7998 } IEM_MC_ENDIF();
7999 IEM_MC_END();
8000 }
8001}
8002
8003
8004/** Opcode 0x0f 0x89. */
8005FNIEMOP_DEF(iemOp_jns_Jv)
8006{
8007 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
8008 IEMOP_HLP_MIN_386();
8009 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8010 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8011 {
8012 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8013 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8014 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8015 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8016 IEM_MC_ADVANCE_RIP_AND_FINISH();
8017 } IEM_MC_ELSE() {
8018 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8019 } IEM_MC_ENDIF();
8020 IEM_MC_END();
8021 }
8022 else
8023 {
8024 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8025 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8026 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8027 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8028 IEM_MC_ADVANCE_RIP_AND_FINISH();
8029 } IEM_MC_ELSE() {
8030 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8031 } IEM_MC_ENDIF();
8032 IEM_MC_END();
8033 }
8034}
8035
8036
8037/** Opcode 0x0f 0x8a. */
8038FNIEMOP_DEF(iemOp_jp_Jv)
8039{
8040 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
8041 IEMOP_HLP_MIN_386();
8042 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8043 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8044 {
8045 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8046 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8047 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8048 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8049 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8050 } IEM_MC_ELSE() {
8051 IEM_MC_ADVANCE_RIP_AND_FINISH();
8052 } IEM_MC_ENDIF();
8053 IEM_MC_END();
8054 }
8055 else
8056 {
8057 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8058 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8059 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8060 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8061 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8062 } IEM_MC_ELSE() {
8063 IEM_MC_ADVANCE_RIP_AND_FINISH();
8064 } IEM_MC_ENDIF();
8065 IEM_MC_END();
8066 }
8067}
8068
8069
8070/** Opcode 0x0f 0x8b. */
8071FNIEMOP_DEF(iemOp_jnp_Jv)
8072{
8073 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
8074 IEMOP_HLP_MIN_386();
8075 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8076 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8077 {
8078 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8079 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8080 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8081 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8082 IEM_MC_ADVANCE_RIP_AND_FINISH();
8083 } IEM_MC_ELSE() {
8084 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8085 } IEM_MC_ENDIF();
8086 IEM_MC_END();
8087 }
8088 else
8089 {
8090 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8091 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8092 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8093 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8094 IEM_MC_ADVANCE_RIP_AND_FINISH();
8095 } IEM_MC_ELSE() {
8096 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8097 } IEM_MC_ENDIF();
8098 IEM_MC_END();
8099 }
8100}
8101
8102
8103/** Opcode 0x0f 0x8c. */
8104FNIEMOP_DEF(iemOp_jl_Jv)
8105{
8106 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
8107 IEMOP_HLP_MIN_386();
8108 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8109 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8110 {
8111 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8112 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8113 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8114 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8115 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8116 } IEM_MC_ELSE() {
8117 IEM_MC_ADVANCE_RIP_AND_FINISH();
8118 } IEM_MC_ENDIF();
8119 IEM_MC_END();
8120 }
8121 else
8122 {
8123 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8124 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8125 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8126 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8127 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8128 } IEM_MC_ELSE() {
8129 IEM_MC_ADVANCE_RIP_AND_FINISH();
8130 } IEM_MC_ENDIF();
8131 IEM_MC_END();
8132 }
8133}
8134
8135
8136/** Opcode 0x0f 0x8d. */
8137FNIEMOP_DEF(iemOp_jnl_Jv)
8138{
8139 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
8140 IEMOP_HLP_MIN_386();
8141 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8142 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8143 {
8144 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8145 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8146 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8147 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8148 IEM_MC_ADVANCE_RIP_AND_FINISH();
8149 } IEM_MC_ELSE() {
8150 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8151 } IEM_MC_ENDIF();
8152 IEM_MC_END();
8153 }
8154 else
8155 {
8156 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8157 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8158 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8159 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8160 IEM_MC_ADVANCE_RIP_AND_FINISH();
8161 } IEM_MC_ELSE() {
8162 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8163 } IEM_MC_ENDIF();
8164 IEM_MC_END();
8165 }
8166}
8167
8168
8169/** Opcode 0x0f 0x8e. */
8170FNIEMOP_DEF(iemOp_jle_Jv)
8171{
8172 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
8173 IEMOP_HLP_MIN_386();
8174 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8175 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8176 {
8177 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8178 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8179 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8180 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8181 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8182 } IEM_MC_ELSE() {
8183 IEM_MC_ADVANCE_RIP_AND_FINISH();
8184 } IEM_MC_ENDIF();
8185 IEM_MC_END();
8186 }
8187 else
8188 {
8189 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8190 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8191 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8192 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8193 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8194 } IEM_MC_ELSE() {
8195 IEM_MC_ADVANCE_RIP_AND_FINISH();
8196 } IEM_MC_ENDIF();
8197 IEM_MC_END();
8198 }
8199}
8200
8201
8202/** Opcode 0x0f 0x8f. */
8203FNIEMOP_DEF(iemOp_jnle_Jv)
8204{
8205 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
8206 IEMOP_HLP_MIN_386();
8207 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8208 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8209 {
8210 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8211 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8212 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8213 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8214 IEM_MC_ADVANCE_RIP_AND_FINISH();
8215 } IEM_MC_ELSE() {
8216 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8217 } IEM_MC_ENDIF();
8218 IEM_MC_END();
8219 }
8220 else
8221 {
8222 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8223 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8224 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8225 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8226 IEM_MC_ADVANCE_RIP_AND_FINISH();
8227 } IEM_MC_ELSE() {
8228 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8229 } IEM_MC_ENDIF();
8230 IEM_MC_END();
8231 }
8232}
8233
8234
8235/** Opcode 0x0f 0x90. */
8236FNIEMOP_DEF(iemOp_seto_Eb)
8237{
8238 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
8239 IEMOP_HLP_MIN_386();
8240 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8241
8242 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8243 * any way. AMD says it's "unused", whatever that means. We're
8244 * ignoring it for now. */
8245 if (IEM_IS_MODRM_REG_MODE(bRm))
8246 {
8247 /* register target */
8248 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8249 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8250 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8251 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8252 } IEM_MC_ELSE() {
8253 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8254 } IEM_MC_ENDIF();
8255 IEM_MC_ADVANCE_RIP_AND_FINISH();
8256 IEM_MC_END();
8257 }
8258 else
8259 {
8260 /* memory target */
8261 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
8262 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8263 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8264 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8265 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8266 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8267 } IEM_MC_ELSE() {
8268 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8269 } IEM_MC_ENDIF();
8270 IEM_MC_ADVANCE_RIP_AND_FINISH();
8271 IEM_MC_END();
8272 }
8273}
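/*
 * Note: the rest of the 0x0f 0x90..0x9f SETcc handlers below share the seto
 * shape above: evaluate one EFLAGS condition and store a single byte, 1 when
 * it holds and 0 otherwise, to a byte register or to memory; the ModR/M reg
 * field plays no part in operand selection (see the @todo).
 */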
8274
8275
8276/** Opcode 0x0f 0x91. */
8277FNIEMOP_DEF(iemOp_setno_Eb)
8278{
8279 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
8280 IEMOP_HLP_MIN_386();
8281 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8282
8283 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8284 * any way. AMD says it's "unused", whatever that means. We're
8285 * ignoring it for now. */
8286 if (IEM_IS_MODRM_REG_MODE(bRm))
8287 {
8288 /* register target */
8289 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8290 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8291 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8292 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8293 } IEM_MC_ELSE() {
8294 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8295 } IEM_MC_ENDIF();
8296 IEM_MC_ADVANCE_RIP_AND_FINISH();
8297 IEM_MC_END();
8298 }
8299 else
8300 {
8301 /* memory target */
8302 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
8303 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8304 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8305 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8306 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8307 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8308 } IEM_MC_ELSE() {
8309 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8310 } IEM_MC_ENDIF();
8311 IEM_MC_ADVANCE_RIP_AND_FINISH();
8312 IEM_MC_END();
8313 }
8314}
8315
8316
8317/** Opcode 0x0f 0x92. */
8318FNIEMOP_DEF(iemOp_setc_Eb)
8319{
8320 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
8321 IEMOP_HLP_MIN_386();
8322 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8323
8324 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8325 * any way. AMD says it's "unused", whatever that means. We're
8326 * ignoring it for now. */
8327 if (IEM_IS_MODRM_REG_MODE(bRm))
8328 {
8329 /* register target */
8330 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8331 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8332 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8333 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8334 } IEM_MC_ELSE() {
8335 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8336 } IEM_MC_ENDIF();
8337 IEM_MC_ADVANCE_RIP_AND_FINISH();
8338 IEM_MC_END();
8339 }
8340 else
8341 {
8342 /* memory target */
8343 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
8344 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8345 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8346 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8347 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8348 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8349 } IEM_MC_ELSE() {
8350 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8351 } IEM_MC_ENDIF();
8352 IEM_MC_ADVANCE_RIP_AND_FINISH();
8353 IEM_MC_END();
8354 }
8355}
8356
8357
8358/** Opcode 0x0f 0x93. */
8359FNIEMOP_DEF(iemOp_setnc_Eb)
8360{
8361 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
8362 IEMOP_HLP_MIN_386();
8363 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8364
8365 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8366 * any way. AMD says it's "unused", whatever that means. We're
8367 * ignoring it for now. */
8368 if (IEM_IS_MODRM_REG_MODE(bRm))
8369 {
8370 /* register target */
8371 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8372 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8373 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8374 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8375 } IEM_MC_ELSE() {
8376 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8377 } IEM_MC_ENDIF();
8378 IEM_MC_ADVANCE_RIP_AND_FINISH();
8379 IEM_MC_END();
8380 }
8381 else
8382 {
8383 /* memory target */
8384 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
8385 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8386 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8387 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8388 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8389 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8390 } IEM_MC_ELSE() {
8391 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8392 } IEM_MC_ENDIF();
8393 IEM_MC_ADVANCE_RIP_AND_FINISH();
8394 IEM_MC_END();
8395 }
8396}
8397
8398
8399/** Opcode 0x0f 0x94. */
8400FNIEMOP_DEF(iemOp_sete_Eb)
8401{
8402 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
8403 IEMOP_HLP_MIN_386();
8404 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8405
8406 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8407 * any way. AMD says it's "unused", whatever that means. We're
8408 * ignoring it for now. */
8409 if (IEM_IS_MODRM_REG_MODE(bRm))
8410 {
8411 /* register target */
8412 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8413 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8414 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8415 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8416 } IEM_MC_ELSE() {
8417 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8418 } IEM_MC_ENDIF();
8419 IEM_MC_ADVANCE_RIP_AND_FINISH();
8420 IEM_MC_END();
8421 }
8422 else
8423 {
8424 /* memory target */
8425 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
8426 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8427 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8428 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8429 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8430 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8431 } IEM_MC_ELSE() {
8432 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8433 } IEM_MC_ENDIF();
8434 IEM_MC_ADVANCE_RIP_AND_FINISH();
8435 IEM_MC_END();
8436 }
8437}
8438
8439
8440/** Opcode 0x0f 0x95. */
8441FNIEMOP_DEF(iemOp_setne_Eb)
8442{
8443 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
8444 IEMOP_HLP_MIN_386();
8445 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8446
8447 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8448 * any way. AMD says it's "unused", whatever that means. We're
8449 * ignoring it for now. */
8450 if (IEM_IS_MODRM_REG_MODE(bRm))
8451 {
8452 /* register target */
8453 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8454 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8455 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8456 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8457 } IEM_MC_ELSE() {
8458 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8459 } IEM_MC_ENDIF();
8460 IEM_MC_ADVANCE_RIP_AND_FINISH();
8461 IEM_MC_END();
8462 }
8463 else
8464 {
8465 /* memory target */
8466 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
8467 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8468 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8469 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8470 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8471 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8472 } IEM_MC_ELSE() {
8473 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8474 } IEM_MC_ENDIF();
8475 IEM_MC_ADVANCE_RIP_AND_FINISH();
8476 IEM_MC_END();
8477 }
8478}
8479
8480
8481/** Opcode 0x0f 0x96. */
8482FNIEMOP_DEF(iemOp_setbe_Eb)
8483{
8484 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
8485 IEMOP_HLP_MIN_386();
8486 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8487
8488 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8489 * any way. AMD says it's "unused", whatever that means. We're
8490 * ignoring it for now. */
8491 if (IEM_IS_MODRM_REG_MODE(bRm))
8492 {
8493 /* register target */
8494 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8495 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8496 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8497 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8498 } IEM_MC_ELSE() {
8499 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8500 } IEM_MC_ENDIF();
8501 IEM_MC_ADVANCE_RIP_AND_FINISH();
8502 IEM_MC_END();
8503 }
8504 else
8505 {
8506 /* memory target */
8507 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
8508 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8509 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8510 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8511 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8512 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8513 } IEM_MC_ELSE() {
8514 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8515 } IEM_MC_ENDIF();
8516 IEM_MC_ADVANCE_RIP_AND_FINISH();
8517 IEM_MC_END();
8518 }
8519}
8520
8521
8522/** Opcode 0x0f 0x97. */
8523FNIEMOP_DEF(iemOp_setnbe_Eb)
8524{
8525 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
8526 IEMOP_HLP_MIN_386();
8527 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8528
8529 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8530 * any way. AMD says it's "unused", whatever that means. We're
8531 * ignoring it for now. */
8532 if (IEM_IS_MODRM_REG_MODE(bRm))
8533 {
8534 /* register target */
8535 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8536 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8537 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8538 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8539 } IEM_MC_ELSE() {
8540 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8541 } IEM_MC_ENDIF();
8542 IEM_MC_ADVANCE_RIP_AND_FINISH();
8543 IEM_MC_END();
8544 }
8545 else
8546 {
8547 /* memory target */
8548 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
8549 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8550 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8551 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8552 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8553 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8554 } IEM_MC_ELSE() {
8555 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8556 } IEM_MC_ENDIF();
8557 IEM_MC_ADVANCE_RIP_AND_FINISH();
8558 IEM_MC_END();
8559 }
8560}
8561
8562
8563/** Opcode 0x0f 0x98. */
8564FNIEMOP_DEF(iemOp_sets_Eb)
8565{
8566 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
8567 IEMOP_HLP_MIN_386();
8568 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8569
8570 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8571 * any way. AMD says it's "unused", whatever that means. We're
8572 * ignoring it for now. */
8573 if (IEM_IS_MODRM_REG_MODE(bRm))
8574 {
8575 /* register target */
8576 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8577 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8578 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8579 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8580 } IEM_MC_ELSE() {
8581 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8582 } IEM_MC_ENDIF();
8583 IEM_MC_ADVANCE_RIP_AND_FINISH();
8584 IEM_MC_END();
8585 }
8586 else
8587 {
8588 /* memory target */
8589 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
8590 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8591 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8592 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8593 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8594 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8595 } IEM_MC_ELSE() {
8596 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8597 } IEM_MC_ENDIF();
8598 IEM_MC_ADVANCE_RIP_AND_FINISH();
8599 IEM_MC_END();
8600 }
8601}
8602
8603
8604/** Opcode 0x0f 0x99. */
8605FNIEMOP_DEF(iemOp_setns_Eb)
8606{
8607 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
8608 IEMOP_HLP_MIN_386();
8609 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8610
8611 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8612 * any way. AMD says it's "unused", whatever that means. We're
8613 * ignoring it for now. */
8614 if (IEM_IS_MODRM_REG_MODE(bRm))
8615 {
8616 /* register target */
8617 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8618 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8619 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8620 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8621 } IEM_MC_ELSE() {
8622 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8623 } IEM_MC_ENDIF();
8624 IEM_MC_ADVANCE_RIP_AND_FINISH();
8625 IEM_MC_END();
8626 }
8627 else
8628 {
8629 /* memory target */
8630 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
8631 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8632 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8633 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8634 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8635 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8636 } IEM_MC_ELSE() {
8637 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8638 } IEM_MC_ENDIF();
8639 IEM_MC_ADVANCE_RIP_AND_FINISH();
8640 IEM_MC_END();
8641 }
8642}
8643
8644
8645/** Opcode 0x0f 0x9a. */
8646FNIEMOP_DEF(iemOp_setp_Eb)
8647{
8648 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
8649 IEMOP_HLP_MIN_386();
8650 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8651
8652 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8653 * any way. AMD says it's "unused", whatever that means. We're
8654 * ignoring it for now. */
8655 if (IEM_IS_MODRM_REG_MODE(bRm))
8656 {
8657 /* register target */
8658 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8659 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8660 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8661 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8662 } IEM_MC_ELSE() {
8663 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8664 } IEM_MC_ENDIF();
8665 IEM_MC_ADVANCE_RIP_AND_FINISH();
8666 IEM_MC_END();
8667 }
8668 else
8669 {
8670 /* memory target */
8671 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
8672 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8673 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8674 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8675 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8676 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8677 } IEM_MC_ELSE() {
8678 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8679 } IEM_MC_ENDIF();
8680 IEM_MC_ADVANCE_RIP_AND_FINISH();
8681 IEM_MC_END();
8682 }
8683}
8684
8685
8686/** Opcode 0x0f 0x9b. */
8687FNIEMOP_DEF(iemOp_setnp_Eb)
8688{
8689 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
8690 IEMOP_HLP_MIN_386();
8691 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8692
8693 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8694 * any way. AMD says it's "unused", whatever that means. We're
8695 * ignoring it for now. */
8696 if (IEM_IS_MODRM_REG_MODE(bRm))
8697 {
8698 /* register target */
8699 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8700 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8701 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8702 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8703 } IEM_MC_ELSE() {
8704 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8705 } IEM_MC_ENDIF();
8706 IEM_MC_ADVANCE_RIP_AND_FINISH();
8707 IEM_MC_END();
8708 }
8709 else
8710 {
8711 /* memory target */
8712 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
8713 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8714 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8715 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8716 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8717 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8718 } IEM_MC_ELSE() {
8719 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8720 } IEM_MC_ENDIF();
8721 IEM_MC_ADVANCE_RIP_AND_FINISH();
8722 IEM_MC_END();
8723 }
8724}
8725
8726
8727/** Opcode 0x0f 0x9c. */
8728FNIEMOP_DEF(iemOp_setl_Eb)
8729{
8730 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
8731 IEMOP_HLP_MIN_386();
8732 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8733
8734 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8735 * any way. AMD says it's "unused", whatever that means. We're
8736 * ignoring it for now. */
8737 if (IEM_IS_MODRM_REG_MODE(bRm))
8738 {
8739 /* register target */
8740 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8741 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8742 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8743 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8744 } IEM_MC_ELSE() {
8745 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8746 } IEM_MC_ENDIF();
8747 IEM_MC_ADVANCE_RIP_AND_FINISH();
8748 IEM_MC_END();
8749 }
8750 else
8751 {
8752 /* memory target */
8753 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
8754 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8755 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8756 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8757 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8758 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8759 } IEM_MC_ELSE() {
8760 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8761 } IEM_MC_ENDIF();
8762 IEM_MC_ADVANCE_RIP_AND_FINISH();
8763 IEM_MC_END();
8764 }
8765}
8766
8767
8768/** Opcode 0x0f 0x9d. */
8769FNIEMOP_DEF(iemOp_setnl_Eb)
8770{
8771 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
8772 IEMOP_HLP_MIN_386();
8773 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8774
8775 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8776 * any way. AMD says it's "unused", whatever that means. We're
8777      * ignoring it for now. */
8778 if (IEM_IS_MODRM_REG_MODE(bRm))
8779 {
8780 /* register target */
8781 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8782 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8783 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8784 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8785 } IEM_MC_ELSE() {
8786 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8787 } IEM_MC_ENDIF();
8788 IEM_MC_ADVANCE_RIP_AND_FINISH();
8789 IEM_MC_END();
8790 }
8791 else
8792 {
8793 /* memory target */
8794 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
8795 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8796 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8797 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8798 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8799 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8800 } IEM_MC_ELSE() {
8801 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8802 } IEM_MC_ENDIF();
8803 IEM_MC_ADVANCE_RIP_AND_FINISH();
8804 IEM_MC_END();
8805 }
8806}
8807
8808
8809/** Opcode 0x0f 0x9e. */
8810FNIEMOP_DEF(iemOp_setle_Eb)
8811{
8812 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
8813 IEMOP_HLP_MIN_386();
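    /* The 'le' (less or equal) condition is signed: true when ZF is set or SF != OF. */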
8814 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8815
8816 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8817 * any way. AMD says it's "unused", whatever that means. We're
8818      * ignoring it for now. */
8819 if (IEM_IS_MODRM_REG_MODE(bRm))
8820 {
8821 /* register target */
8822 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8823 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8824 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8825 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8826 } IEM_MC_ELSE() {
8827 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8828 } IEM_MC_ENDIF();
8829 IEM_MC_ADVANCE_RIP_AND_FINISH();
8830 IEM_MC_END();
8831 }
8832 else
8833 {
8834 /* memory target */
8835 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
8836 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8837 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8838 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8839 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8840 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8841 } IEM_MC_ELSE() {
8842 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8843 } IEM_MC_ENDIF();
8844 IEM_MC_ADVANCE_RIP_AND_FINISH();
8845 IEM_MC_END();
8846 }
8847}
8848
8849
8850/** Opcode 0x0f 0x9f. */
8851FNIEMOP_DEF(iemOp_setnle_Eb)
8852{
8853 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
8854 IEMOP_HLP_MIN_386();
8855 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8856
8857 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8858 * any way. AMD says it's "unused", whatever that means. We're
8859      * ignoring it for now. */
8860 if (IEM_IS_MODRM_REG_MODE(bRm))
8861 {
8862 /* register target */
8863 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386);
8864 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8865 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8866 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8867 } IEM_MC_ELSE() {
8868 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8869 } IEM_MC_ENDIF();
8870 IEM_MC_ADVANCE_RIP_AND_FINISH();
8871 IEM_MC_END();
8872 }
8873 else
8874 {
8875 /* memory target */
8876 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
8877 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8878 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8879 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8880 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8881 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8882 } IEM_MC_ELSE() {
8883 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8884 } IEM_MC_ENDIF();
8885 IEM_MC_ADVANCE_RIP_AND_FINISH();
8886 IEM_MC_END();
8887 }
8888}
8889
8890
8891/** Opcode 0x0f 0xa0. */
8892FNIEMOP_DEF(iemOp_push_fs)
8893{
8894 IEMOP_MNEMONIC(push_fs, "push fs");
8895 IEMOP_HLP_MIN_386();
8896 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8897 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
8898}
8899
8900
8901/** Opcode 0x0f 0xa1. */
8902FNIEMOP_DEF(iemOp_pop_fs)
8903{
8904 IEMOP_MNEMONIC(pop_fs, "pop fs");
8905 IEMOP_HLP_MIN_386();
8906 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8907 IEM_MC_DEFER_TO_CIMPL_2_RET(0, iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
8908}
8909
8910
8911/** Opcode 0x0f 0xa2. */
8912FNIEMOP_DEF(iemOp_cpuid)
8913{
8914 IEMOP_MNEMONIC(cpuid, "cpuid");
8915     IEMOP_HLP_MIN_486(); /* not all 486 models have it. */
8916 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8917 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_cpuid);
8918}
8919
8920
8921/**
8922 * Body for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
8923 * iemOp_bts_Ev_Gv.
8924 */
8925
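/*
 * Note: In the memory forms below the bit offset in Gv is signed.  The low
 * 4, 5 or 6 bits (depending on the operand size) select the bit within the
 * addressed unit, while the arithmetically shifted remainder adjusts the
 * effective address in whole operand-sized units, so negative offsets reach
 * memory below the initial location.
 */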
8926#define IEMOP_BODY_BIT_Ev_Gv_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
8927 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
8928 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
8929 \
8930 if (IEM_IS_MODRM_REG_MODE(bRm)) \
8931 { \
8932 /* register destination. */ \
8933 switch (pVCpu->iem.s.enmEffOpSize) \
8934 { \
8935 case IEMMODE_16BIT: \
8936 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386); \
8937 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8938 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
8939 IEM_MC_ARG(uint16_t, u16Src, 1); \
8940 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
8941 \
8942 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8943 IEM_MC_AND_LOCAL_U16(u16Src, 0xf); \
8944 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8945 IEM_MC_REF_EFLAGS(pEFlags); \
8946 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
8947 \
8948 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8949 IEM_MC_END(); \
8950 break; \
8951 \
8952 case IEMMODE_32BIT: \
8953 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386); \
8954 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8955 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
8956 IEM_MC_ARG(uint32_t, u32Src, 1); \
8957 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
8958 \
8959 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8960 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f); \
8961 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8962 IEM_MC_REF_EFLAGS(pEFlags); \
8963 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
8964 \
8965 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
8966 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8967 IEM_MC_END(); \
8968 break; \
8969 \
8970 case IEMMODE_64BIT: \
8971 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT); \
8972 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8973 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
8974 IEM_MC_ARG(uint64_t, u64Src, 1); \
8975 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
8976 \
8977 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8978 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f); \
8979 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8980 IEM_MC_REF_EFLAGS(pEFlags); \
8981 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
8982 \
8983 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8984 IEM_MC_END(); \
8985 break; \
8986 \
8987 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
8988 } \
8989 } \
8990 else \
8991 { \
8992 /* memory destination. */ \
8993 /** @todo test negative bit offsets! */ \
8994 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
8995 { \
8996 switch (pVCpu->iem.s.enmEffOpSize) \
8997 { \
8998 case IEMMODE_16BIT: \
8999 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_386); \
9000 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9001 IEM_MC_ARG(uint16_t, u16Src, 1); \
9002 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9003 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9004 IEM_MC_LOCAL(int16_t, i16AddrAdj); \
9005 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9006 \
9007 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9008 IEMOP_HLP_DONE_DECODING(); \
9009 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9010 IEM_MC_ASSIGN(i16AddrAdj, u16Src); \
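                /* Split the signed bit offset: the low four bits select the bit */ \
                /* within the 16-bit word, while the arithmetically shifted */ \
                /* remainder adjusts the effective address in whole words. */ \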
9011 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
9012 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
9013 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
9014 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
9015 IEM_MC_FETCH_EFLAGS(EFlags); \
9016 \
9017 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9018 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
9019 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
9020 \
9021 IEM_MC_COMMIT_EFLAGS(EFlags); \
9022 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9023 IEM_MC_END(); \
9024 break; \
9025 \
9026 case IEMMODE_32BIT: \
9027 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_386); \
9028 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9029 IEM_MC_ARG(uint32_t, u32Src, 1); \
9030 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9031 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9032 IEM_MC_LOCAL(int32_t, i32AddrAdj); \
9033 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9034 \
9035 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9036 IEMOP_HLP_DONE_DECODING(); \
9037 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9038 IEM_MC_ASSIGN(i32AddrAdj, u32Src); \
9039 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
9040 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
9041 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
9042 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
9043 IEM_MC_FETCH_EFLAGS(EFlags); \
9044 \
9045 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9046 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
9047 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
9048 \
9049 IEM_MC_COMMIT_EFLAGS(EFlags); \
9050 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9051 IEM_MC_END(); \
9052 break; \
9053 \
9054 case IEMMODE_64BIT: \
9055 IEM_MC_BEGIN(3, 5, IEM_MC_F_64BIT); \
9056 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9057 IEM_MC_ARG(uint64_t, u64Src, 1); \
9058 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9059 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9060 IEM_MC_LOCAL(int64_t, i64AddrAdj); \
9061 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9062 \
9063 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9064 IEMOP_HLP_DONE_DECODING(); \
9065 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9066 IEM_MC_ASSIGN(i64AddrAdj, u64Src); \
9067 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9068 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9069 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9070 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9071 IEM_MC_FETCH_EFLAGS(EFlags); \
9072 \
9073 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9074 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
9075 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
9076 \
9077 IEM_MC_COMMIT_EFLAGS(EFlags); \
9078 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9079 IEM_MC_END(); \
9080 break; \
9081 \
9082 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9083 } \
9084 } \
9085 else \
9086 { \
9087 (void)0
9088/* Separate macro to work around a parsing issue in IEMAllInstPython.py. */
9089#define IEMOP_BODY_BIT_Ev_Gv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
9090 switch (pVCpu->iem.s.enmEffOpSize) \
9091 { \
9092 case IEMMODE_16BIT: \
9093 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_386); \
9094 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9095 IEM_MC_ARG(uint16_t, u16Src, 1); \
9096 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9097 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9098 IEM_MC_LOCAL(int16_t, i16AddrAdj); \
9099 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9100 \
9101 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9102 IEMOP_HLP_DONE_DECODING(); \
9103 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9104 IEM_MC_ASSIGN(i16AddrAdj, u16Src); \
9105 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
9106 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
9107 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
9108 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
9109 IEM_MC_FETCH_EFLAGS(EFlags); \
9110 \
9111 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9112 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
9113 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
9114 \
9115 IEM_MC_COMMIT_EFLAGS(EFlags); \
9116 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9117 IEM_MC_END(); \
9118 break; \
9119 \
9120 case IEMMODE_32BIT: \
9121 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_386); \
9122 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9123 IEM_MC_ARG(uint32_t, u32Src, 1); \
9124 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9125 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9126 IEM_MC_LOCAL(int32_t, i32AddrAdj); \
9127 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9128 \
9129 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9130 IEMOP_HLP_DONE_DECODING(); \
9131 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9132 IEM_MC_ASSIGN(i32AddrAdj, u32Src); \
9133 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
9134 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
9135 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
9136 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
9137 IEM_MC_FETCH_EFLAGS(EFlags); \
9138 \
9139 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9140 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
9141 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
9142 \
9143 IEM_MC_COMMIT_EFLAGS(EFlags); \
9144 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9145 IEM_MC_END(); \
9146 break; \
9147 \
9148 case IEMMODE_64BIT: \
9149 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT); \
9150 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9151 IEM_MC_ARG(uint64_t, u64Src, 1); \
9152 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9153 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9154 IEM_MC_LOCAL(int64_t, i64AddrAdj); \
9155 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9156 \
9157 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9158 IEMOP_HLP_DONE_DECODING(); \
9159 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9160 IEM_MC_ASSIGN(i64AddrAdj, u64Src); \
9161 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9162 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9163 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9164 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9165 IEM_MC_FETCH_EFLAGS(EFlags); \
9166 \
9167 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9168 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
9169 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
9170 \
9171 IEM_MC_COMMIT_EFLAGS(EFlags); \
9172 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9173 IEM_MC_END(); \
9174 break; \
9175 \
9176 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9177 } \
9178 } \
9179 } \
9180 (void)0
9181
9182/* Read-only version (bt). */
9183#define IEMOP_BODY_BIT_Ev_Gv_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
9184 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
9185 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
9186 \
9187 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9188 { \
9189 /* register destination. */ \
9190 switch (pVCpu->iem.s.enmEffOpSize) \
9191 { \
9192 case IEMMODE_16BIT: \
9193 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386); \
9194 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9195 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
9196 IEM_MC_ARG(uint16_t, u16Src, 1); \
9197 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9198 \
9199 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9200 IEM_MC_AND_LOCAL_U16(u16Src, 0xf); \
9201 IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9202 IEM_MC_REF_EFLAGS(pEFlags); \
9203 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
9204 \
9205 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9206 IEM_MC_END(); \
9207 break; \
9208 \
9209 case IEMMODE_32BIT: \
9210 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386); \
9211 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9212 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
9213 IEM_MC_ARG(uint32_t, u32Src, 1); \
9214 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9215 \
9216 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9217 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f); \
9218 IEM_MC_REF_GREG_U32_CONST(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9219 IEM_MC_REF_EFLAGS(pEFlags); \
9220 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
9221 \
9222 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9223 IEM_MC_END(); \
9224 break; \
9225 \
9226 case IEMMODE_64BIT: \
9227 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT); \
9228 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9229 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
9230 IEM_MC_ARG(uint64_t, u64Src, 1); \
9231 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9232 \
9233 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9234 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f); \
9235 IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9236 IEM_MC_REF_EFLAGS(pEFlags); \
9237 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
9238 \
9239 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9240 IEM_MC_END(); \
9241 break; \
9242 \
9243 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9244 } \
9245 } \
9246 else \
9247 { \
9248 /* memory destination. */ \
9249 /** @todo test negative bit offsets! */ \
9250 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
9251 { \
9252 switch (pVCpu->iem.s.enmEffOpSize) \
9253 { \
9254 case IEMMODE_16BIT: \
9255 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_386); \
9256 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
9257 IEM_MC_ARG(uint16_t, u16Src, 1); \
9258 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9259 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9260 IEM_MC_LOCAL(int16_t, i16AddrAdj); \
9261 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9262 \
9263 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9264 IEMOP_HLP_DONE_DECODING(); \
9265 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9266 IEM_MC_ASSIGN(i16AddrAdj, u16Src); \
9267 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
9268 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
9269 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
9270 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
9271 IEM_MC_FETCH_EFLAGS(EFlags); \
9272 \
9273 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9274 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
9275 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
9276 \
9277 IEM_MC_COMMIT_EFLAGS(EFlags); \
9278 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9279 IEM_MC_END(); \
9280 break; \
9281 \
9282 case IEMMODE_32BIT: \
9283 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_386); \
9284 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
9285 IEM_MC_ARG(uint32_t, u32Src, 1); \
9286 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9287 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9288 IEM_MC_LOCAL(int32_t, i32AddrAdj); \
9289 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9290 \
9291 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9292 IEMOP_HLP_DONE_DECODING(); \
9293 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9294 IEM_MC_ASSIGN(i32AddrAdj, u32Src); \
9295 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
9296 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
9297 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
9298 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
9299 IEM_MC_FETCH_EFLAGS(EFlags); \
9300 \
9301 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9302 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
9303 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
9304 \
9305 IEM_MC_COMMIT_EFLAGS(EFlags); \
9306 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9307 IEM_MC_END(); \
9308 break; \
9309 \
9310 case IEMMODE_64BIT: \
9311 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT); \
9312 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
9313 IEM_MC_ARG(uint64_t, u64Src, 1); \
9314 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9315 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9316 IEM_MC_LOCAL(int64_t, i64AddrAdj); \
9317 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9318 \
9319 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9320 IEMOP_HLP_DONE_DECODING(); \
9321 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9322 IEM_MC_ASSIGN(i64AddrAdj, u64Src); \
9323 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9324 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9325 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9326 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9327 IEM_MC_FETCH_EFLAGS(EFlags); \
9328 \
9329 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9330 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
9331 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
9332 \
9333 IEM_MC_COMMIT_EFLAGS(EFlags); \
9334 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9335 IEM_MC_END(); \
9336 break; \
9337 \
9338 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9339 } \
9340 } \
9341 else \
9342 { \
9343 IEMOP_HLP_DONE_DECODING(); \
9344 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
9345 } \
9346 } \
9347 (void)0
9348
9349
9350/** Opcode 0x0f 0xa3. */
9351FNIEMOP_DEF(iemOp_bt_Ev_Gv)
9352{
9353 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
9354 IEMOP_HLP_MIN_386();
9355 IEMOP_BODY_BIT_Ev_Gv_RO(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64);
9356}
9357
9358
9359/**
9360 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
9361 */
9362FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
9363{
9364 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9365 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
9366
9367 if (IEM_IS_MODRM_REG_MODE(bRm))
9368 {
9369 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9370
9371 switch (pVCpu->iem.s.enmEffOpSize)
9372 {
9373 case IEMMODE_16BIT:
9374 IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_386);
9375 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9376 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9377 IEM_MC_ARG(uint16_t, u16Src, 1);
9378 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
9379 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9380
9381 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9382 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9383 IEM_MC_REF_EFLAGS(pEFlags);
9384 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9385
9386 IEM_MC_ADVANCE_RIP_AND_FINISH();
9387 IEM_MC_END();
9388 break;
9389
9390 case IEMMODE_32BIT:
9391 IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_386);
9392 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9393 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9394 IEM_MC_ARG(uint32_t, u32Src, 1);
9395 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
9396 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9397
9398 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9399 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9400 IEM_MC_REF_EFLAGS(pEFlags);
9401 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9402
9403 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9404 IEM_MC_ADVANCE_RIP_AND_FINISH();
9405 IEM_MC_END();
9406 break;
9407
9408 case IEMMODE_64BIT:
9409 IEM_MC_BEGIN(4, 0, IEM_MC_F_64BIT);
9410 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9411 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9412 IEM_MC_ARG(uint64_t, u64Src, 1);
9413 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
9414 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9415
9416 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9417 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9418 IEM_MC_REF_EFLAGS(pEFlags);
9419 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9420
9421 IEM_MC_ADVANCE_RIP_AND_FINISH();
9422 IEM_MC_END();
9423 break;
9424
9425 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9426 }
9427 }
9428 else
9429 {
9430 switch (pVCpu->iem.s.enmEffOpSize)
9431 {
9432 case IEMMODE_16BIT:
9433 IEM_MC_BEGIN(4, 3, IEM_MC_F_MIN_386);
9434 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9435 IEM_MC_ARG(uint16_t, u16Src, 1);
9436 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9437 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9438 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9439 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9440
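                /* Note: the third IEM_MC_CALC_RM_EFF_ADDR parameter is 1 because one
                   immediate byte still follows the ModR/M bytes, and RIP-relative
                   addressing needs to account for it. */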
9441 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9442 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9443 IEM_MC_ASSIGN(cShiftArg, cShift);
9444 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9445 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9446 IEM_MC_FETCH_EFLAGS(EFlags);
9447 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9448 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9449
9450 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
9451 IEM_MC_COMMIT_EFLAGS(EFlags);
9452 IEM_MC_ADVANCE_RIP_AND_FINISH();
9453 IEM_MC_END();
9454 break;
9455
9456 case IEMMODE_32BIT:
9457 IEM_MC_BEGIN(4, 3, IEM_MC_F_MIN_386);
9458 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9459 IEM_MC_ARG(uint32_t, u32Src, 1);
9460 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9461 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9462 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9463 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9464
9465 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9466 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9467 IEM_MC_ASSIGN(cShiftArg, cShift);
9468 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9469 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9470 IEM_MC_FETCH_EFLAGS(EFlags);
9471 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9472 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9473
9474 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
9475 IEM_MC_COMMIT_EFLAGS(EFlags);
9476 IEM_MC_ADVANCE_RIP_AND_FINISH();
9477 IEM_MC_END();
9478 break;
9479
9480 case IEMMODE_64BIT:
9481 IEM_MC_BEGIN(4, 3, IEM_MC_F_64BIT);
9482 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9483 IEM_MC_ARG(uint64_t, u64Src, 1);
9484 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9485 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9486 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9487 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9488
9489 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9490 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9491 IEM_MC_ASSIGN(cShiftArg, cShift);
9492 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9493 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9494 IEM_MC_FETCH_EFLAGS(EFlags);
9495 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9496 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9497
9498 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
9499 IEM_MC_COMMIT_EFLAGS(EFlags);
9500 IEM_MC_ADVANCE_RIP_AND_FINISH();
9501 IEM_MC_END();
9502 break;
9503
9504 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9505 }
9506 }
9507}
9508
9509
9510/**
9511 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
9512 */
9513FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
9514{
9515 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9516 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
9517
9518 if (IEM_IS_MODRM_REG_MODE(bRm))
9519 {
9520 switch (pVCpu->iem.s.enmEffOpSize)
9521 {
9522 case IEMMODE_16BIT:
9523 IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_386);
9524 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9525 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9526 IEM_MC_ARG(uint16_t, u16Src, 1);
9527 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9528 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9529
9530 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9531 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9532 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9533 IEM_MC_REF_EFLAGS(pEFlags);
9534 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9535
9536 IEM_MC_ADVANCE_RIP_AND_FINISH();
9537 IEM_MC_END();
9538 break;
9539
9540 case IEMMODE_32BIT:
9541 IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_386);
9542 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9543 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9544 IEM_MC_ARG(uint32_t, u32Src, 1);
9545 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9546 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9547
9548 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9549 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9550 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9551 IEM_MC_REF_EFLAGS(pEFlags);
9552 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9553
9554 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
9555 IEM_MC_ADVANCE_RIP_AND_FINISH();
9556 IEM_MC_END();
9557 break;
9558
9559 case IEMMODE_64BIT:
9560 IEM_MC_BEGIN(4, 0, IEM_MC_F_64BIT);
9561 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9562 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9563 IEM_MC_ARG(uint64_t, u64Src, 1);
9564 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9565 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9566
9567 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9568 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9569 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9570 IEM_MC_REF_EFLAGS(pEFlags);
9571 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9572
9573 IEM_MC_ADVANCE_RIP_AND_FINISH();
9574 IEM_MC_END();
9575 break;
9576
9577 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9578 }
9579 }
9580 else
9581 {
9582 switch (pVCpu->iem.s.enmEffOpSize)
9583 {
9584 case IEMMODE_16BIT:
9585 IEM_MC_BEGIN(4, 3, IEM_MC_F_MIN_386);
9586 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9587 IEM_MC_ARG(uint16_t, u16Src, 1);
9588 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9589 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9590 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9591 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9592
9593 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9594 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9595 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9596 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9597 IEM_MC_FETCH_EFLAGS(EFlags);
9598 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9599 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9600
9601 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
9602 IEM_MC_COMMIT_EFLAGS(EFlags);
9603 IEM_MC_ADVANCE_RIP_AND_FINISH();
9604 IEM_MC_END();
9605 break;
9606
9607 case IEMMODE_32BIT:
9608 IEM_MC_BEGIN(4, 3, IEM_MC_F_MIN_386);
9609 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9610 IEM_MC_ARG(uint32_t, u32Src, 1);
9611 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9612 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9613 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9614 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9615
9616 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9617 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9618 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9619 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9620 IEM_MC_FETCH_EFLAGS(EFlags);
9621 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9622 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9623
9624 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
9625 IEM_MC_COMMIT_EFLAGS(EFlags);
9626 IEM_MC_ADVANCE_RIP_AND_FINISH();
9627 IEM_MC_END();
9628 break;
9629
9630 case IEMMODE_64BIT:
9631 IEM_MC_BEGIN(4, 3, IEM_MC_F_64BIT);
9632 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9633 IEM_MC_ARG(uint64_t, u64Src, 1);
9634 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9635 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9636 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9637 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9638
9639 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9640 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9641 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9642 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9643 IEM_MC_FETCH_EFLAGS(EFlags);
9644 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9645 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9646
9647 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
9648 IEM_MC_COMMIT_EFLAGS(EFlags);
9649 IEM_MC_ADVANCE_RIP_AND_FINISH();
9650 IEM_MC_END();
9651 break;
9652
9653 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9654 }
9655 }
9656}
9657
9658
9659
9660/** Opcode 0x0f 0xa4. */
9661FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
9662{
9663 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
9664 IEMOP_HLP_MIN_386();
9665 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9666}
9667
9668
9669/** Opcode 0x0f 0xa5. */
9670FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
9671{
9672 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
9673 IEMOP_HLP_MIN_386();
9674 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9675}
9676
9677
9678/** Opcode 0x0f 0xa8. */
9679FNIEMOP_DEF(iemOp_push_gs)
9680{
9681 IEMOP_MNEMONIC(push_gs, "push gs");
9682 IEMOP_HLP_MIN_386();
9683 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9684 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
9685}
9686
9687
9688/** Opcode 0x0f 0xa9. */
9689FNIEMOP_DEF(iemOp_pop_gs)
9690{
9691 IEMOP_MNEMONIC(pop_gs, "pop gs");
9692 IEMOP_HLP_MIN_386();
9693 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9694 IEM_MC_DEFER_TO_CIMPL_2_RET(0, iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
9695}
9696
9697
9698/** Opcode 0x0f 0xaa. */
9699FNIEMOP_DEF(iemOp_rsm)
9700{
9701 IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
9702 IEMOP_HLP_MIN_386(); /* 386SL and later. */
9703 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9704 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
9705 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
9706 iemCImpl_rsm);
9707}
9708
9709
9710
9711/** Opcode 0x0f 0xab. */
9712FNIEMOP_DEF(iemOp_bts_Ev_Gv)
9713{
9714 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
9715 IEMOP_HLP_MIN_386();
9716 IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_bts_u16, iemAImpl_bts_u32, iemAImpl_bts_u64);
9717 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
9718}
9719
9720
9721/** Opcode 0x0f 0xac. */
9722FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
9723{
9724 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
9725 IEMOP_HLP_MIN_386();
9726 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9727}
9728
9729
9730/** Opcode 0x0f 0xad. */
9731FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
9732{
9733 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
9734 IEMOP_HLP_MIN_386();
9735 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9736}
9737
9738
9739/** Opcode 0x0f 0xae mem/0. */
9740FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
9741{
9742 IEMOP_MNEMONIC(fxsave, "fxsave m512");
9743 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9744 IEMOP_RAISE_INVALID_OPCODE_RET();
9745
9746 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_PENTIUM_II);
9747 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9748 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9749 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9750 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9751 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9752 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9753 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9754 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
9755 IEM_MC_END();
9756}
9757
9758
9759/** Opcode 0x0f 0xae mem/1. */
9760FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
9761{
9762 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
9763 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9764 IEMOP_RAISE_INVALID_OPCODE_RET();
9765
9766 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_PENTIUM_II);
9767 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9768 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9769 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9770 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9771 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9772 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9773 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9774 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9775 IEM_MC_END();
9776}
9777
9778
9779/**
9780 * @opmaps grp15
9781 * @opcode !11/2
9782 * @oppfx none
9783 * @opcpuid sse
9784 * @opgroup og_sse_mxcsrsm
9785 * @opxcpttype 5
9786 * @optest op1=0 -> mxcsr=0
9787 * @optest op1=0x2083 -> mxcsr=0x2083
9788 * @optest op1=0xfffffffe -> value.xcpt=0xd
9789 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
9790 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
9791 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
9792 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
9793 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
9794 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9795 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9796 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9797 */
9798FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
9799{
9800 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9801 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9802 IEMOP_RAISE_INVALID_OPCODE_RET();
9803
9804 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_PENTIUM_II);
9805 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9806 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9807 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9808 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9809 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9810 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9811 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
9812 IEM_MC_END();
9813}
9814
9815
9816/**
9817 * @opmaps grp15
9818 * @opcode !11/3
9819 * @oppfx none
9820 * @opcpuid sse
9821 * @opgroup og_sse_mxcsrsm
9822 * @opxcpttype 5
9823 * @optest mxcsr=0 -> op1=0
9824 * @optest mxcsr=0x2083 -> op1=0x2083
9825 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
9826 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
9827 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
9828 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
9829 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
9830 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9831 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9832 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9833 */
9834FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
9835{
9836 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9837 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9838 IEMOP_RAISE_INVALID_OPCODE_RET();
9839
9840 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_PENTIUM_II);
9841 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9842 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9843 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9844 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9845 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9846 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9847 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
9848 IEM_MC_END();
9849}
9850
9851
9852/**
9853 * @opmaps grp15
9854 * @opcode !11/4
9855 * @oppfx none
9856 * @opcpuid xsave
9857 * @opgroup og_system
9858 * @opxcpttype none
9859 */
9860FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
9861{
9862 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
9863 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9864 IEMOP_RAISE_INVALID_OPCODE_RET();
9865
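    /* The effective operand size is passed along, which lets the C
       implementation tell the REX.W (XSAVE64) form apart. */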
9866 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_CORE);
9867 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9868 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9869 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9870 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9871 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9872 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9873 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9874 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
9875 IEM_MC_END();
9876}
9877
9878
9879/**
9880 * @opmaps grp15
9881 * @opcode !11/5
9882 * @oppfx none
9883 * @opcpuid xsave
9884 * @opgroup og_system
9885 * @opxcpttype none
9886 */
9887FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
9888{
9889 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
9890 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9891 IEMOP_RAISE_INVALID_OPCODE_RET();
9892
9893 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_CORE);
9894 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9895 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9896 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2);
9897 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9898 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9899 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9900 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9901 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9902 IEM_MC_END();
9903}
9904
9905/** Opcode 0x0f 0xae mem/6. */
9906FNIEMOP_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
9907
9908/**
9909 * @opmaps grp15
9910 * @opcode !11/7
9911 * @oppfx none
9912 * @opcpuid clfsh
9913 * @opgroup og_cachectl
9914 * @optest op1=1 ->
9915 */
9916FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
9917{
9918 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9919 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
9920 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9921
9922 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER);
9923 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9924 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9925 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9926 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9927 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9928 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9929 IEM_MC_END();
9930}
9931
9932/**
9933 * @opmaps grp15
9934 * @opcode !11/7
9935 * @oppfx 0x66
9936 * @opcpuid clflushopt
9937 * @opgroup og_cachectl
9938 * @optest op1=1 ->
9939 */
9940FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
9941{
9942 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9943 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
9944 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9945
9946 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER);
9947 IEM_MC_ARG(uint8_t, iEffSeg, 0);
9948 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9949 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9950 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9951 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9952 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9953 IEM_MC_END();
9954}
9955
9956
9957/** Opcode 0x0f 0xae 11b/5. */
9958FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
9959{
9960 RT_NOREF_PV(bRm);
9961 IEMOP_MNEMONIC(lfence, "lfence");
9962 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER);
9963 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
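    /* The guest must have SSE2 (checked above); x86 hosts without SSE2 fall
       back to an alternative memory fence helper, while ARM64 hosts always
       use the native implementation. */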
9964#ifdef RT_ARCH_ARM64
9965 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
9966#else
9967 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9968 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
9969 else
9970 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9971#endif
9972 IEM_MC_ADVANCE_RIP_AND_FINISH();
9973 IEM_MC_END();
9974}
9975
9976
9977/** Opcode 0x0f 0xae 11b/6. */
9978FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
9979{
9980 RT_NOREF_PV(bRm);
9981 IEMOP_MNEMONIC(mfence, "mfence");
9982 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER);
9983 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
9984#ifdef RT_ARCH_ARM64
9985 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
9986#else
9987 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9988 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
9989 else
9990 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9991#endif
9992 IEM_MC_ADVANCE_RIP_AND_FINISH();
9993 IEM_MC_END();
9994}
9995
9996
9997/** Opcode 0x0f 0xae 11b/7. */
9998FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
9999{
10000 RT_NOREF_PV(bRm);
10001 IEMOP_MNEMONIC(sfence, "sfence");
10002 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER);
10003 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
10004#ifdef RT_ARCH_ARM64
10005 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
10006#else
10007 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
10008 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
10009 else
10010 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
10011#endif
10012 IEM_MC_ADVANCE_RIP_AND_FINISH();
10013 IEM_MC_END();
10014}
10015
10016
10017/** Opcode 0xf3 0x0f 0xae 11b/0. */
10018FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
10019{
10020 IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
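    /* The effective operand size (REX.W in 64-bit mode) selects between the
       64-bit and the zero-extending 32-bit form. */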
10021 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
10022 {
10023 IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT);
10024 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10025 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10026 IEM_MC_ARG(uint64_t, u64Dst, 0);
10027 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
10028 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
10029 IEM_MC_ADVANCE_RIP_AND_FINISH();
10030 IEM_MC_END();
10031 }
10032 else
10033 {
10034 IEM_MC_BEGIN(1, 0, IEM_MC_F_NOT_286_OR_OLDER);
10035 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10036 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10037 IEM_MC_ARG(uint32_t, u32Dst, 0);
10038 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
10039 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
10040 IEM_MC_ADVANCE_RIP_AND_FINISH();
10041 IEM_MC_END();
10042 }
10043}
10044
10045
10046/** Opcode 0xf3 0x0f 0xae 11b/1. */
10047FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
10048{
10049 IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
10050 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
10051 {
10052 IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT);
10053 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10054 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10055 IEM_MC_ARG(uint64_t, u64Dst, 0);
10056 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
10057 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
10058 IEM_MC_ADVANCE_RIP_AND_FINISH();
10059 IEM_MC_END();
10060 }
10061 else
10062 {
10063 IEM_MC_BEGIN(1, 0, IEM_MC_F_NOT_286_OR_OLDER);
10064 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10065 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10066 IEM_MC_ARG(uint32_t, u32Dst, 0);
10067 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
10068 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
10069 IEM_MC_ADVANCE_RIP_AND_FINISH();
10070 IEM_MC_END();
10071 }
10072}
10073
10074
10075/** Opcode 0xf3 0x0f 0xae 11b/2. */
10076FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
10077{
10078 IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
10079 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
10080 {
10081 IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT);
10082 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10083 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10084 IEM_MC_ARG(uint64_t, u64Dst, 0);
10085 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10086 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
10087 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
10088 IEM_MC_ADVANCE_RIP_AND_FINISH();
10089 IEM_MC_END();
10090 }
10091 else
10092 {
10093 IEM_MC_BEGIN(1, 0, IEM_MC_F_NOT_286_OR_OLDER);
10094 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10095 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10096 IEM_MC_ARG(uint32_t, u32Dst, 0);
10097 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10098 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
10099 IEM_MC_ADVANCE_RIP_AND_FINISH();
10100 IEM_MC_END();
10101 }
10102}
10103
10104
10105/** Opcode 0xf3 0x0f 0xae 11b/3. */
10106FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
10107{
10108 IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
10109 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
10110 {
10111 IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT);
10112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10113 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10114 IEM_MC_ARG(uint64_t, u64Dst, 0);
10115 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10116 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
10117 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
10118 IEM_MC_ADVANCE_RIP_AND_FINISH();
10119 IEM_MC_END();
10120 }
10121 else
10122 {
10123 IEM_MC_BEGIN(1, 0, IEM_MC_F_NOT_286_OR_OLDER);
10124 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10125 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10126 IEM_MC_ARG(uint32_t, u32Dst, 0);
10127 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10128 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
10129 IEM_MC_ADVANCE_RIP_AND_FINISH();
10130 IEM_MC_END();
10131 }
10132}
10133
10134
10135/**
10136 * Group 15 jump table for register variant.
10137 */
10138IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
10139{ /* pfx: none, 066h, 0f3h, 0f2h */
10140 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
10141 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
10142 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
10143 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
10144 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
10145 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10146 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10147 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10148};
10149AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
10150
10151
10152/**
10153 * Group 15 jump table for memory variant.
10154 */
10155IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
10156{ /* pfx: none, 066h, 0f3h, 0f2h */
10157 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10158 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10159 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10160 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10161 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10162 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10163 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10164 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10165};
10166AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
10167
10168
10169/** Opcode 0x0f 0xae. */
10170FNIEMOP_DEF(iemOp_Grp15)
10171{
10172     IEMOP_HLP_MIN_586(); /* Neither entirely accurate nor strictly needed, but useful for debugging 286 code. */
10173 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
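    /* Both tables are indexed by the ModR/M reg field times four plus the
       prefix index (none, 066h, 0f3h, 0f2h). */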
10174 if (IEM_IS_MODRM_REG_MODE(bRm))
10175 /* register, register */
10176 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10177 + pVCpu->iem.s.idxPrefix], bRm);
10178 /* memory, register */
10179 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10180 + pVCpu->iem.s.idxPrefix], bRm);
10181}
10182
10183
10184/** Opcode 0x0f 0xaf. */
10185FNIEMOP_DEF(iemOp_imul_Gv_Ev)
10186{
10187 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
10188 IEMOP_HLP_MIN_386();
10189 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10190 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_eflags);
10191 IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1, IEM_MC_F_MIN_386);
10192}
10193
10194
10195/** Opcode 0x0f 0xb0. */
10196FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
10197{
10198 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
10199 IEMOP_HLP_MIN_486();
10200 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10201
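    /* CMPXCHG compares AL with the destination operand: on a match ZF is set
       and the source is written to the destination, otherwise the destination
       value is loaded into AL.  The helpers below do the compare, the
       conditional store and the EFLAGS update. */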
10202 if (IEM_IS_MODRM_REG_MODE(bRm))
10203 {
10204 IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_486);
10205 IEMOP_HLP_DONE_DECODING();
10206 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10207 IEM_MC_ARG(uint8_t *, pu8Al, 1);
10208 IEM_MC_ARG(uint8_t, u8Src, 2);
10209 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10210
10211 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10212 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10213 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
10214 IEM_MC_REF_EFLAGS(pEFlags);
10215 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10216 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
10217 else
10218 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
10219
10220 IEM_MC_ADVANCE_RIP_AND_FINISH();
10221 IEM_MC_END();
10222 }
10223 else
10224 {
10225 IEM_MC_BEGIN(4, 4, IEM_MC_F_MIN_486);
10226 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10227 IEM_MC_ARG(uint8_t *, pu8Al, 1);
10228 IEM_MC_ARG(uint8_t, u8Src, 2);
10229 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10230 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10231 IEM_MC_LOCAL(uint8_t, u8Al);
10232 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
10233
10234 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10235 IEMOP_HLP_DONE_DECODING();
10236 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10237 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10238 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
10239 IEM_MC_FETCH_EFLAGS(EFlags);
10240 IEM_MC_REF_LOCAL(pu8Al, u8Al);
10241 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10242 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
10243 else
10244 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
10245
10246 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
10247 IEM_MC_COMMIT_EFLAGS(EFlags);
10248 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
10249 IEM_MC_ADVANCE_RIP_AND_FINISH();
10250 IEM_MC_END();
10251 }
10252}
10253
10254/** Opcode 0x0f 0xb1. */
10255FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
10256{
10257 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
10258 IEMOP_HLP_MIN_486();
10259 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10260
10261 if (IEM_IS_MODRM_REG_MODE(bRm))
10262 {
10263 switch (pVCpu->iem.s.enmEffOpSize)
10264 {
10265 case IEMMODE_16BIT:
10266 IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_486);
10267 IEMOP_HLP_DONE_DECODING();
10268 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10269 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
10270 IEM_MC_ARG(uint16_t, u16Src, 2);
10271 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10272
10273 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10274 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10275 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
10276 IEM_MC_REF_EFLAGS(pEFlags);
10277 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10278 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
10279 else
10280 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
10281
10282 IEM_MC_ADVANCE_RIP_AND_FINISH();
10283 IEM_MC_END();
10284 break;
10285
10286 case IEMMODE_32BIT:
10287 IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_486);
10288 IEMOP_HLP_DONE_DECODING();
10289 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10290 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
10291 IEM_MC_ARG(uint32_t, u32Src, 2);
10292 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10293
10294 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10295 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10296 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
10297 IEM_MC_REF_EFLAGS(pEFlags);
10298 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10299 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
10300 else
10301 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
10302
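                /* Writing a 32-bit GPR clears the upper half of the full 64-bit
                   register.  Which register was written depends on the outcome:
                   the destination on success (ZF set), EAX on failure. */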
10303 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
10304 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
10305 } IEM_MC_ELSE() {
10306 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
10307 } IEM_MC_ENDIF();
10308
10309 IEM_MC_ADVANCE_RIP_AND_FINISH();
10310 IEM_MC_END();
10311 break;
10312
10313 case IEMMODE_64BIT:
10314 IEM_MC_BEGIN(4, 0, IEM_MC_F_64BIT);
10315 IEMOP_HLP_DONE_DECODING();
10316 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10317 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
10318#ifdef RT_ARCH_X86
10319 IEM_MC_ARG(uint64_t *, pu64Src, 2);
10320#else
10321 IEM_MC_ARG(uint64_t, u64Src, 2);
10322#endif
10323 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10324
10325 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10326 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
10327 IEM_MC_REF_EFLAGS(pEFlags);
10328#ifdef RT_ARCH_X86
10329 IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10330 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10331 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
10332 else
10333 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
10334#else
10335 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10336 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10337 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
10338 else
10339 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
10340#endif
10341
10342 IEM_MC_ADVANCE_RIP_AND_FINISH();
10343 IEM_MC_END();
10344 break;
10345
10346 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10347 }
10348 }
10349 else
10350 {
10351 switch (pVCpu->iem.s.enmEffOpSize)
10352 {
10353 case IEMMODE_16BIT:
10354 IEM_MC_BEGIN(4, 4, IEM_MC_F_MIN_486);
10355 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10356 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
10357 IEM_MC_ARG(uint16_t, u16Src, 2);
10358 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10359 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10360 IEM_MC_LOCAL(uint16_t, u16Ax);
10361 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
10362
10363 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10364 IEMOP_HLP_DONE_DECODING();
10365 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10366 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10367 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
10368 IEM_MC_FETCH_EFLAGS(EFlags);
10369 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
10370 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10371 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
10372 else
10373 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
10374
10375 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
10376 IEM_MC_COMMIT_EFLAGS(EFlags);
10377 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
10378 IEM_MC_ADVANCE_RIP_AND_FINISH();
10379 IEM_MC_END();
10380 break;
10381
10382 case IEMMODE_32BIT:
10383 IEM_MC_BEGIN(4, 4, IEM_MC_F_MIN_486);
10384 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10385 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
10386 IEM_MC_ARG(uint32_t, u32Src, 2);
10387 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10388 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10389 IEM_MC_LOCAL(uint32_t, u32Eax);
10390 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
10391
10392 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10393 IEMOP_HLP_DONE_DECODING();
10394 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10395 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10396 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
10397 IEM_MC_FETCH_EFLAGS(EFlags);
10398 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
10399 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10400 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
10401 else
10402 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
10403
10404 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
10405 IEM_MC_COMMIT_EFLAGS(EFlags);
10406
10407 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
10408 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
10409 } IEM_MC_ENDIF();
10410
10411 IEM_MC_ADVANCE_RIP_AND_FINISH();
10412 IEM_MC_END();
10413 break;
10414
10415 case IEMMODE_64BIT:
10416 IEM_MC_BEGIN(4, 4, IEM_MC_F_64BIT);
10417 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10418 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
10419#ifdef RT_ARCH_X86
10420 IEM_MC_ARG(uint64_t *, pu64Src, 2);
10421#else
10422 IEM_MC_ARG(uint64_t, u64Src, 2);
10423#endif
10424 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10425 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10426 IEM_MC_LOCAL(uint64_t, u64Rax);
10427 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
10428
10429 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10430 IEMOP_HLP_DONE_DECODING();
10431 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10432 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
10433 IEM_MC_FETCH_EFLAGS(EFlags);
10434 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
10435#ifdef RT_ARCH_X86
10436 IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10437 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10438 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
10439 else
10440 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
10441#else
10442 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10443 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10444 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
10445 else
10446 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
10447#endif
10448
10449 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
10450 IEM_MC_COMMIT_EFLAGS(EFlags);
10451 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
10452 IEM_MC_ADVANCE_RIP_AND_FINISH();
10453 IEM_MC_END();
10454 break;
10455
10456 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10457 }
10458 }
10459}
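
/*
 * Note: in the 32-bit register case above only the register that was
 * actually written gets its high dword cleared (the destination when ZF=1,
 * xAX when ZF=0), which is why the IEM_MC_IF_EFL_BIT_SET construct is used
 * rather than an unconditional clear.  Typical guest usage is a
 * compare-and-swap loop; a C equivalent using the IPRT helper (sketch only,
 * assuming the usual ASMAtomicCmpXchgExU32 semantics; pu32Val is a made-up
 * example variable):
 *
 *      uint32_t uOld = ASMAtomicReadU32(pu32Val);
 *      uint32_t uNew;
 *      do
 *          uNew = uOld + 1;    // compute the new value from the old one
 *      while (!ASMAtomicCmpXchgExU32(pu32Val, uNew, uOld, &uOld));
 */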
10460
10461
10462/** Opcode 0x0f 0xb2. */
10463FNIEMOP_DEF(iemOp_lss_Gv_Mp)
10464{
10465 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
10466 IEMOP_HLP_MIN_386();
10467 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10468 if (IEM_IS_MODRM_REG_MODE(bRm))
10469 IEMOP_RAISE_INVALID_OPCODE_RET();
10470 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
10471}
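
/*
 * Note: LSS (and LFS/LGS below) loads a far pointer from memory: the
 * operand-sized offset comes first, followed by the 16-bit selector, e.g.
 * for a 32-bit operand size the layout is (illustrative sketch only):
 *
 *      struct { uint32_t off; uint16_t uSel; };    // m16:32 operand
 *      // Gv = off; SS = uSel (with the usual selector/descriptor checks)
 *
 * A register operand makes no sense here, hence the #UD above.
 */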
10472
10473
10474/** Opcode 0x0f 0xb3. */
10475FNIEMOP_DEF(iemOp_btr_Ev_Gv)
10476{
10477 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
10478 IEMOP_HLP_MIN_386();
10479 IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_btr_u16, iemAImpl_btr_u32, iemAImpl_btr_u64);
10480 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_btr_u16_locked, iemAImpl_btr_u32_locked, iemAImpl_btr_u64_locked);
10481}
10482
10483
10484/** Opcode 0x0f 0xb4. */
10485FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
10486{
10487 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
10488 IEMOP_HLP_MIN_386();
10489 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10490 if (IEM_IS_MODRM_REG_MODE(bRm))
10491 IEMOP_RAISE_INVALID_OPCODE_RET();
10492 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
10493}
10494
10495
10496/** Opcode 0x0f 0xb5. */
10497FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
10498{
10499 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
10500 IEMOP_HLP_MIN_386();
10501 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10502 if (IEM_IS_MODRM_REG_MODE(bRm))
10503 IEMOP_RAISE_INVALID_OPCODE_RET();
10504 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
10505}
10506
10507
10508/** Opcode 0x0f 0xb6. */
10509FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
10510{
10511 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
10512 IEMOP_HLP_MIN_386();
10513
10514 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10515
10516 /*
10517 * If rm is denoting a register, no more instruction bytes.
10518 */
10519 if (IEM_IS_MODRM_REG_MODE(bRm))
10520 {
10521 switch (pVCpu->iem.s.enmEffOpSize)
10522 {
10523 case IEMMODE_16BIT:
10524 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
10525 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10526 IEM_MC_LOCAL(uint16_t, u16Value);
10527 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10528 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10529 IEM_MC_ADVANCE_RIP_AND_FINISH();
10530 IEM_MC_END();
10531 break;
10532
10533 case IEMMODE_32BIT:
10534 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
10535 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10536 IEM_MC_LOCAL(uint32_t, u32Value);
10537 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10538 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10539 IEM_MC_ADVANCE_RIP_AND_FINISH();
10540 IEM_MC_END();
10541 break;
10542
10543 case IEMMODE_64BIT:
10544 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT);
10545 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10546 IEM_MC_LOCAL(uint64_t, u64Value);
10547 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10548 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10549 IEM_MC_ADVANCE_RIP_AND_FINISH();
10550 IEM_MC_END();
10551 break;
10552
10553 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10554 }
10555 }
10556 else
10557 {
10558 /*
10559 * We're loading a register from memory.
10560 */
10561 switch (pVCpu->iem.s.enmEffOpSize)
10562 {
10563 case IEMMODE_16BIT:
10564 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386);
10565 IEM_MC_LOCAL(uint16_t, u16Value);
10566 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10567 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10568 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10569 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10570 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10571 IEM_MC_ADVANCE_RIP_AND_FINISH();
10572 IEM_MC_END();
10573 break;
10574
10575 case IEMMODE_32BIT:
10576 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386);
10577 IEM_MC_LOCAL(uint32_t, u32Value);
10578 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10579 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10580 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10581 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10582 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10583 IEM_MC_ADVANCE_RIP_AND_FINISH();
10584 IEM_MC_END();
10585 break;
10586
10587 case IEMMODE_64BIT:
10588 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT);
10589 IEM_MC_LOCAL(uint64_t, u64Value);
10590 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10591 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10592 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10593 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10594 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10595 IEM_MC_ADVANCE_RIP_AND_FINISH();
10596 IEM_MC_END();
10597 break;
10598
10599 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10600 }
10601 }
10602}
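
/*
 * Note: MOVZX just zero extends the narrower source into the wider
 * destination, i.e. in C terms (illustrative only):
 *
 *      uint32_t u32Dst = (uint32_t)(uint8_t)uSrc;  // movzx r32, r/m8
 *
 * In 64-bit mode the 32-bit form clears bits 63:32 implicitly, like any
 * other 32-bit GPR write (IEM_MC_STORE_GREG_U32 takes care of that).
 */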
10603
10604
10605/** Opcode 0x0f 0xb7. */
10606FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
10607{
10608 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
10609 IEMOP_HLP_MIN_386();
10610
10611 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10612
10613 /** @todo Not entirely sure how the operand size prefix is handled here,
10614 * assuming that it will be ignored. Would be nice to have a few
10615     *        tests for this. */
10616 /*
10617 * If rm is denoting a register, no more instruction bytes.
10618 */
10619 if (IEM_IS_MODRM_REG_MODE(bRm))
10620 {
10621 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10622 {
10623 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
10624 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10625 IEM_MC_LOCAL(uint32_t, u32Value);
10626 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10627 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10628 IEM_MC_ADVANCE_RIP_AND_FINISH();
10629 IEM_MC_END();
10630 }
10631 else
10632 {
10633 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT);
10634 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10635 IEM_MC_LOCAL(uint64_t, u64Value);
10636 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10637 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10638 IEM_MC_ADVANCE_RIP_AND_FINISH();
10639 IEM_MC_END();
10640 }
10641 }
10642 else
10643 {
10644 /*
10645 * We're loading a register from memory.
10646 */
10647 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10648 {
10649 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386);
10650 IEM_MC_LOCAL(uint32_t, u32Value);
10651 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10652 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10653 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10654 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10655 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10656 IEM_MC_ADVANCE_RIP_AND_FINISH();
10657 IEM_MC_END();
10658 }
10659 else
10660 {
10661 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT);
10662 IEM_MC_LOCAL(uint64_t, u64Value);
10663 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10664 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10665 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10666 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10667 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10668 IEM_MC_ADVANCE_RIP_AND_FINISH();
10669 IEM_MC_END();
10670 }
10671 }
10672}
10673
10674
10675/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
10676FNIEMOP_UD_STUB(iemOp_jmpe);
10677
10678
10679/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
10680FNIEMOP_DEF(iemOp_popcnt_Gv_Ev)
10681{
10682 IEMOP_MNEMONIC2(RM, POPCNT, popcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
10683 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fPopCnt)
10684 return iemOp_InvalidNeedRM(pVCpu);
10685#ifndef TST_IEM_CHECK_MC
10686# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
10687 static const IEMOPBINSIZES s_Native =
10688 { NULL, NULL, iemAImpl_popcnt_u16, NULL, iemAImpl_popcnt_u32, NULL, iemAImpl_popcnt_u64, NULL };
10689# endif
10690 static const IEMOPBINSIZES s_Fallback =
10691 { NULL, NULL, iemAImpl_popcnt_u16_fallback, NULL, iemAImpl_popcnt_u32_fallback, NULL, iemAImpl_popcnt_u64_fallback, NULL };
10692#endif
10693 const IEMOPBINSIZES * const pImpl = IEM_SELECT_HOST_OR_FALLBACK(fPopCnt, &s_Native, &s_Fallback);
10694 IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1, IEM_MC_F_NOT_286_OR_OLDER);
10695}
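
/*
 * Note: the _fallback workers are for hosts without POPCNT.  A portable
 * equivalent of the count they produce (illustrative sketch only):
 *
 *      uint32_t cBits = 0;
 *      while (uSrc)
 *      {
 *          uSrc &= uSrc - 1;   // clears the lowest set bit
 *          cBits++;
 *      }
 *
 * POPCNT sets ZF for a zero source and clears OF, SF, AF, PF and CF.
 */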
10696
10697
10698/**
10699 * @opcode 0xb9
10700 * @opinvalid intel-modrm
10701 * @optest ->
10702 */
10703FNIEMOP_DEF(iemOp_Grp10)
10704{
10705 /*
10706     * AMD does not decode beyond the 0xb9 opcode byte, whereas Intel decodes the
10707     * modr/m byte too.  See bs3-cpu-decoder-1.c32.  So, we can forward to iemOp_InvalidNeedRM.
10708 */
10709 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
10710 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
10711 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
10712}
10713
10714
10715/**
10716 * Body for the group 8 bit instructions (bt/bts/btr/btc Ev,Ib).
10717 */
10718#define IEMOP_BODY_BIT_Ev_Ib_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
10719 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
10720 \
10721 if (IEM_IS_MODRM_REG_MODE(bRm)) \
10722 { \
10723 /* register destination. */ \
10724 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10725 \
10726 switch (pVCpu->iem.s.enmEffOpSize) \
10727 { \
10728 case IEMMODE_16BIT: \
10729 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386); \
10730 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10731 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
10732 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); \
10733 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10734 \
10735 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10736 IEM_MC_REF_EFLAGS(pEFlags); \
10737 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
10738 \
10739 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10740 IEM_MC_END(); \
10741 break; \
10742 \
10743 case IEMMODE_32BIT: \
10744 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386); \
10745 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10746 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
10747 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); \
10748 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10749 \
10750 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10751 IEM_MC_REF_EFLAGS(pEFlags); \
10752 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
10753 \
10754 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
10755 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10756 IEM_MC_END(); \
10757 break; \
10758 \
10759 case IEMMODE_64BIT: \
10760 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT); \
10761 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10762 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
10763 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); \
10764 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10765 \
10766 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10767 IEM_MC_REF_EFLAGS(pEFlags); \
10768 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
10769 \
10770 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10771 IEM_MC_END(); \
10772 break; \
10773 \
10774 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10775 } \
10776 } \
10777 else \
10778 { \
10779 /* memory destination. */ \
10780 /** @todo test negative bit offsets! */ \
10781 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
10782 { \
10783 switch (pVCpu->iem.s.enmEffOpSize) \
10784 { \
10785 case IEMMODE_16BIT: \
10786 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386); \
10787 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
10788 IEM_MC_ARG(uint16_t, u16Src, 1); \
10789 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10790 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10791 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10792 \
10793 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10794 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10795 IEM_MC_ASSIGN(u16Src, bImm & 0x0f); \
10796 IEMOP_HLP_DONE_DECODING(); \
10797 IEM_MC_FETCH_EFLAGS(EFlags); \
10798 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10799 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
10800 \
10801 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
10802 IEM_MC_COMMIT_EFLAGS(EFlags); \
10803 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10804 IEM_MC_END(); \
10805 break; \
10806 \
10807 case IEMMODE_32BIT: \
10808 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386); \
10809 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
10810 IEM_MC_ARG(uint32_t, u32Src, 1); \
10811 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10812 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10813 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10814 \
10815 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10816 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10817 IEM_MC_ASSIGN(u32Src, bImm & 0x1f); \
10818 IEMOP_HLP_DONE_DECODING(); \
10819 IEM_MC_FETCH_EFLAGS(EFlags); \
10820 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10821 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
10822 \
10823 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
10824 IEM_MC_COMMIT_EFLAGS(EFlags); \
10825 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10826 IEM_MC_END(); \
10827 break; \
10828 \
10829 case IEMMODE_64BIT: \
10830 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT); \
10831 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
10832 IEM_MC_ARG(uint64_t, u64Src, 1); \
10833 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10834 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10835 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10836 \
10837 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10838 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10839 IEM_MC_ASSIGN(u64Src, bImm & 0x3f); \
10840 IEMOP_HLP_DONE_DECODING(); \
10841 IEM_MC_FETCH_EFLAGS(EFlags); \
10842 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10843 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
10844 \
10845 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
10846 IEM_MC_COMMIT_EFLAGS(EFlags); \
10847 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10848 IEM_MC_END(); \
10849 break; \
10850 \
10851 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10852 } \
10853 } \
10854 else \
10855 { \
10856 (void)0
10857/* Separate macro to work around parsing issue in IEMAllInstPython.py */
10858#define IEMOP_BODY_BIT_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
10859 switch (pVCpu->iem.s.enmEffOpSize) \
10860 { \
10861 case IEMMODE_16BIT: \
10862 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386); \
10863 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
10864 IEM_MC_ARG(uint16_t, u16Src, 1); \
10865 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10866 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10867 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10868 \
10869 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10870 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10871 IEM_MC_ASSIGN(u16Src, bImm & 0x0f); \
10872 IEMOP_HLP_DONE_DECODING(); \
10873 IEM_MC_FETCH_EFLAGS(EFlags); \
10874 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10875 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
10876 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
10877 \
10878 IEM_MC_COMMIT_EFLAGS(EFlags); \
10879 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10880 IEM_MC_END(); \
10881 break; \
10882 \
10883 case IEMMODE_32BIT: \
10884 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386); \
10885 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
10886 IEM_MC_ARG(uint32_t, u32Src, 1); \
10887 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10888 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10889 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10890 \
10891 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10892 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10893 IEM_MC_ASSIGN(u32Src, bImm & 0x1f); \
10894 IEMOP_HLP_DONE_DECODING(); \
10895 IEM_MC_FETCH_EFLAGS(EFlags); \
10896 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10897 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
10898 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
10899 \
10900 IEM_MC_COMMIT_EFLAGS(EFlags); \
10901 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10902 IEM_MC_END(); \
10903 break; \
10904 \
10905 case IEMMODE_64BIT: \
10906 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT); \
10907 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
10908 IEM_MC_ARG(uint64_t, u64Src, 1); \
10909 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10910 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10911 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10912 \
10913 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10914 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10915 IEM_MC_ASSIGN(u64Src, bImm & 0x3f); \
10916 IEMOP_HLP_DONE_DECODING(); \
10917 IEM_MC_FETCH_EFLAGS(EFlags); \
10918 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10919 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
10920 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
10921 \
10922 IEM_MC_COMMIT_EFLAGS(EFlags); \
10923 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10924 IEM_MC_END(); \
10925 break; \
10926 \
10927 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10928 } \
10929 } \
10930 } \
10931 (void)0
10932
10933/* Read-only version (bt) */
10934#define IEMOP_BODY_BIT_Ev_Ib_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
10935 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
10936 \
10937 if (IEM_IS_MODRM_REG_MODE(bRm)) \
10938 { \
10939 /* register destination. */ \
10940 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10941 \
10942 switch (pVCpu->iem.s.enmEffOpSize) \
10943 { \
10944 case IEMMODE_16BIT: \
10945 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386); \
10946 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10947 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
10948 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); \
10949 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10950 \
10951 IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10952 IEM_MC_REF_EFLAGS(pEFlags); \
10953 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
10954 \
10955 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10956 IEM_MC_END(); \
10957 break; \
10958 \
10959 case IEMMODE_32BIT: \
10960 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386); \
10961 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10962 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
10963 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); \
10964 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10965 \
10966 IEM_MC_REF_GREG_U32_CONST(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10967 IEM_MC_REF_EFLAGS(pEFlags); \
10968 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
10969 \
10970 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10971 IEM_MC_END(); \
10972 break; \
10973 \
10974 case IEMMODE_64BIT: \
10975 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT); \
10976 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10977 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
10978 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); \
10979 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10980 \
10981 IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10982 IEM_MC_REF_EFLAGS(pEFlags); \
10983 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
10984 \
10985 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10986 IEM_MC_END(); \
10987 break; \
10988 \
10989 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10990 } \
10991 } \
10992 else \
10993 { \
10994 /* memory destination. */ \
10995 /** @todo test negative bit offsets! */ \
10996 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
10997 { \
10998 switch (pVCpu->iem.s.enmEffOpSize) \
10999 { \
11000 case IEMMODE_16BIT: \
11001 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386); \
11002 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
11003 IEM_MC_ARG(uint16_t, u16Src, 1); \
11004 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11005 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11006 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11007 \
11008 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
11009 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
11010 IEM_MC_ASSIGN(u16Src, bImm & 0x0f); \
11011 IEMOP_HLP_DONE_DECODING(); \
11012 IEM_MC_FETCH_EFLAGS(EFlags); \
11013 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11014 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
11015 \
11016 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
11017 IEM_MC_COMMIT_EFLAGS(EFlags); \
11018 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11019 IEM_MC_END(); \
11020 break; \
11021 \
11022 case IEMMODE_32BIT: \
11023 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386); \
11024 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
11025 IEM_MC_ARG(uint32_t, u32Src, 1); \
11026 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11027 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11028 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11029 \
11030 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
11031 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
11032 IEM_MC_ASSIGN(u32Src, bImm & 0x1f); \
11033 IEMOP_HLP_DONE_DECODING(); \
11034 IEM_MC_FETCH_EFLAGS(EFlags); \
11035 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11036 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
11037 \
11038 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
11039 IEM_MC_COMMIT_EFLAGS(EFlags); \
11040 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11041 IEM_MC_END(); \
11042 break; \
11043 \
11044 case IEMMODE_64BIT: \
11045 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT); \
11046 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
11047 IEM_MC_ARG(uint64_t, u64Src, 1); \
11048 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11049 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11050 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11051 \
11052 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
11053 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
11054 IEM_MC_ASSIGN(u64Src, bImm & 0x3f); \
11055 IEMOP_HLP_DONE_DECODING(); \
11056 IEM_MC_FETCH_EFLAGS(EFlags); \
11057 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11058 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
11059 \
11060 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
11061 IEM_MC_COMMIT_EFLAGS(EFlags); \
11062 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11063 IEM_MC_END(); \
11064 break; \
11065 \
11066 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11067 } \
11068 } \
11069 else \
11070 { \
11071 IEMOP_HLP_DONE_DECODING(); \
11072 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
11073 } \
11074 } \
11075 (void)0
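
/*
 * Note: in the immediate forms above the bit offset is masked to the operand
 * width (the bImm & 0x0f/0x1f/0x3f), so the memory access always stays
 * within the addressed word, unlike the Ev,Gv forms where a register offset
 * can address bits outside it.  What the workers compute, sketched for the
 * 16-bit case (illustrative only):
 *
 *      uint16_t const fMask = (uint16_t)1 << (u16Src & 15);
 *      if (*pu16Dst & fMask)                       // CF = the selected bit
 *          *pEFlags |= X86_EFL_CF;
 *      else
 *          *pEFlags &= ~X86_EFL_CF;
 *      // bts: *pu16Dst |= fMask;  btr: *pu16Dst &= ~fMask;  btc: *pu16Dst ^= fMask;
 */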
11076
11077
11078/** Opcode 0x0f 0xba /4. */
11079FNIEMOPRM_DEF(iemOp_Grp8_bt_Ev_Ib)
11080{
11081 IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib");
11082 IEMOP_BODY_BIT_Ev_Ib_RO(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64);
11083}
11084
11085
11086/** Opcode 0x0f 0xba /5. */
11087FNIEMOPRM_DEF(iemOp_Grp8_bts_Ev_Ib)
11088{
11089 IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib");
11090 IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_bts_u16, iemAImpl_bts_u32, iemAImpl_bts_u64);
11091 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
11092}
11093
11094
11095/** Opcode 0x0f 0xba /6. */
11096FNIEMOPRM_DEF(iemOp_Grp8_btr_Ev_Ib)
11097{
11098 IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib");
11099 IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_btr_u16, iemAImpl_btr_u32, iemAImpl_btr_u64);
11100 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_btr_u16_locked, iemAImpl_btr_u32_locked, iemAImpl_btr_u64_locked);
11101}
11102
11103
11104/** Opcode 0x0f 0xba /7. */
11105FNIEMOPRM_DEF(iemOp_Grp8_btc_Ev_Ib)
11106{
11107 IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib");
11108 IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_btc_u16, iemAImpl_btc_u32, iemAImpl_btc_u64);
11109 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_btc_u16_locked, iemAImpl_btc_u32_locked, iemAImpl_btc_u64_locked);
11110}
11111
11112
11113/** Opcode 0x0f 0xba. */
11114FNIEMOP_DEF(iemOp_Grp8)
11115{
11116 IEMOP_HLP_MIN_386();
11117 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11118 switch (IEM_GET_MODRM_REG_8(bRm))
11119 {
11120 case 4: return FNIEMOP_CALL_1(iemOp_Grp8_bt_Ev_Ib, bRm);
11121 case 5: return FNIEMOP_CALL_1(iemOp_Grp8_bts_Ev_Ib, bRm);
11122 case 6: return FNIEMOP_CALL_1(iemOp_Grp8_btr_Ev_Ib, bRm);
11123 case 7: return FNIEMOP_CALL_1(iemOp_Grp8_btc_Ev_Ib, bRm);
11124
11125 case 0: case 1: case 2: case 3:
11126 /* Both AMD and Intel want full modr/m decoding and imm8. */
11127 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
11128
11129 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11130 }
11131}
11132
11133
11134/** Opcode 0x0f 0xbb. */
11135FNIEMOP_DEF(iemOp_btc_Ev_Gv)
11136{
11137 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
11138 IEMOP_HLP_MIN_386();
11139 IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_btc_u16, iemAImpl_btc_u32, iemAImpl_btc_u64);
11140 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_btc_u16_locked, iemAImpl_btc_u32_locked, iemAImpl_btc_u64_locked);
11141}
11142
11143
11144/**
11145 * Common worker for BSF and BSR instructions.
11146 *
11147 * These cannot use iemOpHlpBinaryOperator_rv_rm because they don't always write
11148 * the destination register (nothing is written when the source is zero), which
11149 * means that for 32-bit operations the high bits must be left alone.
11150 *
11151 * @param pImpl Pointer to the instruction implementation (assembly).
11152 */
11153FNIEMOP_DEF_1(iemOpHlpBitScanOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
11154{
11155 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11156
11157 /*
11158 * If rm is denoting a register, no more instruction bytes.
11159 */
11160 if (IEM_IS_MODRM_REG_MODE(bRm))
11161 {
11162 switch (pVCpu->iem.s.enmEffOpSize)
11163 {
11164 case IEMMODE_16BIT:
11165 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386);
11166 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11167 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11168 IEM_MC_ARG(uint16_t, u16Src, 1);
11169 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11170
11171 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11172 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11173 IEM_MC_REF_EFLAGS(pEFlags);
11174 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
11175
11176 IEM_MC_ADVANCE_RIP_AND_FINISH();
11177 IEM_MC_END();
11178 break;
11179
11180 case IEMMODE_32BIT:
11181 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386);
11182 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11183 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11184 IEM_MC_ARG(uint32_t, u32Src, 1);
11185 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11186
11187 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11188 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11189 IEM_MC_REF_EFLAGS(pEFlags);
11190 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
11191 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
11192 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
11193 } IEM_MC_ENDIF();
11194 IEM_MC_ADVANCE_RIP_AND_FINISH();
11195 IEM_MC_END();
11196 break;
11197
11198 case IEMMODE_64BIT:
11199 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT);
11200 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11201 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11202 IEM_MC_ARG(uint64_t, u64Src, 1);
11203 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11204
11205 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11206 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11207 IEM_MC_REF_EFLAGS(pEFlags);
11208 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
11209
11210 IEM_MC_ADVANCE_RIP_AND_FINISH();
11211 IEM_MC_END();
11212 break;
11213
11214 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11215 }
11216 }
11217 else
11218 {
11219 /*
11220 * We're accessing memory.
11221 */
11222 switch (pVCpu->iem.s.enmEffOpSize)
11223 {
11224 case IEMMODE_16BIT:
11225 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386);
11226 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11227 IEM_MC_ARG(uint16_t, u16Src, 1);
11228 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11229 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11230
11231 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11232 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11233 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11234 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11235 IEM_MC_REF_EFLAGS(pEFlags);
11236 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
11237
11238 IEM_MC_ADVANCE_RIP_AND_FINISH();
11239 IEM_MC_END();
11240 break;
11241
11242 case IEMMODE_32BIT:
11243 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386);
11244 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11245 IEM_MC_ARG(uint32_t, u32Src, 1);
11246 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11247 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11248
11249 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11250 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11251 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11252 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11253 IEM_MC_REF_EFLAGS(pEFlags);
11254 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
11255
11256 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
11257 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
11258 } IEM_MC_ENDIF();
11259 IEM_MC_ADVANCE_RIP_AND_FINISH();
11260 IEM_MC_END();
11261 break;
11262
11263 case IEMMODE_64BIT:
11264 IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT);
11265 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11266 IEM_MC_ARG(uint64_t, u64Src, 1);
11267 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11268 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11269
11270 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11271 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11272 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11273 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11274 IEM_MC_REF_EFLAGS(pEFlags);
11275 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
11276
11277 IEM_MC_ADVANCE_RIP_AND_FINISH();
11278 IEM_MC_END();
11279 break;
11280
11281 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11282 }
11283 }
11284}
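
/*
 * Note: for a zero source BSF/BSR set ZF and leave the destination untouched
 * (architecturally 'undefined', but leaving it unchanged matches observed
 * CPU behaviour), which is why the high dword is only cleared when ZF ends
 * up clear above.  Sketch of the 32-bit BSF case (illustrative only):
 *
 *      if (!u32Src)
 *          *pEFlags |= X86_EFL_ZF;                 // dst left as-is
 *      else
 *      {
 *          *pEFlags &= ~X86_EFL_ZF;
 *          unsigned iBit = 0;
 *          while (!(u32Src & RT_BIT_32(iBit)))
 *              iBit++;
 *          *pu32Dst = iBit;                        // lowest set bit index
 *      }
 */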
11285
11286
11287/** Opcode 0x0f 0xbc. */
11288FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
11289{
11290 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
11291 IEMOP_HLP_MIN_386();
11292 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
11293 return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsf_eflags));
11294}
11295
11296
11297/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
11298FNIEMOP_DEF(iemOp_tzcnt_Gv_Ev)
11299{
11300 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
11301 return FNIEMOP_CALL(iemOp_bsf_Gv_Ev);
11302 IEMOP_MNEMONIC2(RM, TZCNT, tzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
11303
11304#ifndef TST_IEM_CHECK_MC
11305 static const IEMOPBINSIZES s_iemAImpl_tzcnt =
11306 { NULL, NULL, iemAImpl_tzcnt_u16, NULL, iemAImpl_tzcnt_u32, NULL, iemAImpl_tzcnt_u64, NULL };
11307 static const IEMOPBINSIZES s_iemAImpl_tzcnt_amd =
11308 { NULL, NULL, iemAImpl_tzcnt_u16_amd, NULL, iemAImpl_tzcnt_u32_amd, NULL, iemAImpl_tzcnt_u64_amd, NULL };
11309 static const IEMOPBINSIZES s_iemAImpl_tzcnt_intel =
11310 { NULL, NULL, iemAImpl_tzcnt_u16_intel, NULL, iemAImpl_tzcnt_u32_intel, NULL, iemAImpl_tzcnt_u64_intel, NULL };
11311 static const IEMOPBINSIZES * const s_iemAImpl_tzcnt_eflags[2][4] =
11312 {
11313 { &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt_intel },
11314 { &s_iemAImpl_tzcnt, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt }
11315 };
11316#endif
11317 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
11318 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_tzcnt_eflags,
11319 IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1);
11320 IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1, IEM_MC_F_NOT_286_OR_OLDER);
11321}
11322
11323
11324/** Opcode 0x0f 0xbd. */
11325FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
11326{
11327 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
11328 IEMOP_HLP_MIN_386();
11329 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
11330 return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsr_eflags));
11331}
11332
11333
11334/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
11335FNIEMOP_DEF(iemOp_lzcnt_Gv_Ev)
11336{
11337 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
11338 return FNIEMOP_CALL(iemOp_bsr_Gv_Ev);
11339 IEMOP_MNEMONIC2(RM, LZCNT, lzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
11340
11341#ifndef TST_IEM_CHECK_MC
11342 static const IEMOPBINSIZES s_iemAImpl_lzcnt =
11343 { NULL, NULL, iemAImpl_lzcnt_u16, NULL, iemAImpl_lzcnt_u32, NULL, iemAImpl_lzcnt_u64, NULL };
11344 static const IEMOPBINSIZES s_iemAImpl_lzcnt_amd =
11345 { NULL, NULL, iemAImpl_lzcnt_u16_amd, NULL, iemAImpl_lzcnt_u32_amd, NULL, iemAImpl_lzcnt_u64_amd, NULL };
11346 static const IEMOPBINSIZES s_iemAImpl_lzcnt_intel =
11347 { NULL, NULL, iemAImpl_lzcnt_u16_intel, NULL, iemAImpl_lzcnt_u32_intel, NULL, iemAImpl_lzcnt_u64_intel, NULL };
11348 static const IEMOPBINSIZES * const s_iemAImpl_lzcnt_eflags[2][4] =
11349 {
11350 { &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt_intel },
11351 { &s_iemAImpl_lzcnt, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt }
11352 };
11353#endif
11354 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
11355 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_lzcnt_eflags,
11356 IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1);
11357 IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1, IEM_MC_F_NOT_286_OR_OLDER);
11358}
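
/*
 * Note: unlike BSF/BSR, TZCNT/LZCNT always write the destination: a zero
 * source yields the operand width (16/32/64) with CF=1, any other source
 * yields the trailing/leading zero count with CF=0, and ZF reflects a zero
 * result.  Roughly (illustrative only):
 *
 *      // tzcnt: uDst = uSrc ? countTrailingZeros(uSrc) : cOpBits;  CF = !uSrc;
 *
 * CPUs without BMI1 ignore the F3 prefix and execute plain BSF/BSR, which
 * the !fBmi1 guest-feature checks above mirror.
 */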
11359
11360
11361
11362/** Opcode 0x0f 0xbe. */
11363FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
11364{
11365 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
11366 IEMOP_HLP_MIN_386();
11367
11368 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11369
11370 /*
11371 * If rm is denoting a register, no more instruction bytes.
11372 */
11373 if (IEM_IS_MODRM_REG_MODE(bRm))
11374 {
11375 switch (pVCpu->iem.s.enmEffOpSize)
11376 {
11377 case IEMMODE_16BIT:
11378 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
11379 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11380 IEM_MC_LOCAL(uint16_t, u16Value);
11381 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11382 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
11383 IEM_MC_ADVANCE_RIP_AND_FINISH();
11384 IEM_MC_END();
11385 break;
11386
11387 case IEMMODE_32BIT:
11388 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
11389 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11390 IEM_MC_LOCAL(uint32_t, u32Value);
11391 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11392 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11393 IEM_MC_ADVANCE_RIP_AND_FINISH();
11394 IEM_MC_END();
11395 break;
11396
11397 case IEMMODE_64BIT:
11398 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT);
11399 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11400 IEM_MC_LOCAL(uint64_t, u64Value);
11401 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11402 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11403 IEM_MC_ADVANCE_RIP_AND_FINISH();
11404 IEM_MC_END();
11405 break;
11406
11407 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11408 }
11409 }
11410 else
11411 {
11412 /*
11413 * We're loading a register from memory.
11414 */
11415 switch (pVCpu->iem.s.enmEffOpSize)
11416 {
11417 case IEMMODE_16BIT:
11418 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386);
11419 IEM_MC_LOCAL(uint16_t, u16Value);
11420 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11421 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11422 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11423 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11424 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
11425 IEM_MC_ADVANCE_RIP_AND_FINISH();
11426 IEM_MC_END();
11427 break;
11428
11429 case IEMMODE_32BIT:
11430 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386);
11431 IEM_MC_LOCAL(uint32_t, u32Value);
11432 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11433 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11434 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11435 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11436 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11437 IEM_MC_ADVANCE_RIP_AND_FINISH();
11438 IEM_MC_END();
11439 break;
11440
11441 case IEMMODE_64BIT:
11442 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT);
11443 IEM_MC_LOCAL(uint64_t, u64Value);
11444 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11445 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11446 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11447 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11448 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11449 IEM_MC_ADVANCE_RIP_AND_FINISH();
11450 IEM_MC_END();
11451 break;
11452
11453 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11454 }
11455 }
11456}
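
/*
 * Note: MOVSX is the sign-extending twin of MOVZX (0x0f 0xb6/0xb7), i.e. in
 * C terms (illustrative only):
 *
 *      uint32_t u32Dst = (uint32_t)(int32_t)(int8_t)uSrc;  // movsx r32, r/m8
 */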
11457
11458
11459/** Opcode 0x0f 0xbf. */
11460FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
11461{
11462 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
11463 IEMOP_HLP_MIN_386();
11464
11465 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11466
11467 /** @todo Not entirely sure how the operand size prefix is handled here,
11468 * assuming that it will be ignored. Would be nice to have a few
11469     *        tests for this. */
11470 /*
11471 * If rm is denoting a register, no more instruction bytes.
11472 */
11473 if (IEM_IS_MODRM_REG_MODE(bRm))
11474 {
11475 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11476 {
11477 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386);
11478 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11479 IEM_MC_LOCAL(uint32_t, u32Value);
11480 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11481 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11482 IEM_MC_ADVANCE_RIP_AND_FINISH();
11483 IEM_MC_END();
11484 }
11485 else
11486 {
11487 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT);
11488 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11489 IEM_MC_LOCAL(uint64_t, u64Value);
11490 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11491 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11492 IEM_MC_ADVANCE_RIP_AND_FINISH();
11493 IEM_MC_END();
11494 }
11495 }
11496 else
11497 {
11498 /*
11499 * We're loading a register from memory.
11500 */
11501 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11502 {
11503 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386);
11504 IEM_MC_LOCAL(uint32_t, u32Value);
11505 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11506 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11507 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11508 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11509 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11510 IEM_MC_ADVANCE_RIP_AND_FINISH();
11511 IEM_MC_END();
11512 }
11513 else
11514 {
11515 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT);
11516 IEM_MC_LOCAL(uint64_t, u64Value);
11517 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11518 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11519 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11520 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11521 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11522 IEM_MC_ADVANCE_RIP_AND_FINISH();
11523 IEM_MC_END();
11524 }
11525 }
11526}
11527
11528
11529/** Opcode 0x0f 0xc0. */
11530FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
11531{
11532 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11533 IEMOP_HLP_MIN_486();
11534 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
11535
11536 /*
11537 * If rm is denoting a register, no more instruction bytes.
11538 */
11539 if (IEM_IS_MODRM_REG_MODE(bRm))
11540 {
11541 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_486);
11542 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11543 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
11544 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
11545 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11546
11547 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11548 IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11549 IEM_MC_REF_EFLAGS(pEFlags);
11550 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
11551
11552 IEM_MC_ADVANCE_RIP_AND_FINISH();
11553 IEM_MC_END();
11554 }
11555 else
11556 {
11557 /*
11558 * We're accessing memory.
11559 */
11560 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_486);
11561 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
11562 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
11563 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
11564 IEM_MC_LOCAL(uint8_t, u8RegCopy);
11565 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11566 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
11567
11568 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11569 IEMOP_HLP_DONE_DECODING();
11570 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11571 IEM_MC_FETCH_GREG_U8(u8RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
11572 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
11573 IEM_MC_FETCH_EFLAGS(EFlags);
11574 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11575 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
11576 else
11577 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
11578
11579 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
11580 IEM_MC_COMMIT_EFLAGS(EFlags);
11581 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8RegCopy);
11582 IEM_MC_ADVANCE_RIP_AND_FINISH();
11583 IEM_MC_END();
11584 }
11585}
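
/*
 * Note: XADD exchanges and adds: the destination receives the sum while the
 * register operand receives the old destination value, with EFLAGS set by
 * the addition.  Sketch of the u8 worker (illustrative only):
 *
 *      uint8_t const uOldDst = *pu8Dst;
 *      *pu8Dst = uOldDst + *pu8Reg;    // EFLAGS as for this ADD
 *      *pu8Reg = uOldDst;              // old destination into the source reg
 */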
11586
11587
11588/** Opcode 0x0f 0xc1. */
11589FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
11590{
11591 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
11592 IEMOP_HLP_MIN_486();
11593 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11594
11595 /*
11596 * If rm is denoting a register, no more instruction bytes.
11597 */
11598 if (IEM_IS_MODRM_REG_MODE(bRm))
11599 {
11600 switch (pVCpu->iem.s.enmEffOpSize)
11601 {
11602 case IEMMODE_16BIT:
11603 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_486);
11604 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11605 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11606 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
11607 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11608
11609 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11610 IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11611 IEM_MC_REF_EFLAGS(pEFlags);
11612 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
11613
11614 IEM_MC_ADVANCE_RIP_AND_FINISH();
11615 IEM_MC_END();
11616 break;
11617
11618 case IEMMODE_32BIT:
11619 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_486);
11620 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11621 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11622 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
11623 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11624
11625 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11626 IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11627 IEM_MC_REF_EFLAGS(pEFlags);
11628 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
11629
11630 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
11631 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
11632 IEM_MC_ADVANCE_RIP_AND_FINISH();
11633 IEM_MC_END();
11634 break;
11635
11636 case IEMMODE_64BIT:
11637 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT);
11638 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11639 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11640 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
11641 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11642
11643 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11644 IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11645 IEM_MC_REF_EFLAGS(pEFlags);
11646 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
11647
11648 IEM_MC_ADVANCE_RIP_AND_FINISH();
11649 IEM_MC_END();
11650 break;
11651
11652 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11653 }
11654 }
11655 else
11656 {
11657 /*
11658 * We're accessing memory.
11659 */
11660 switch (pVCpu->iem.s.enmEffOpSize)
11661 {
11662 case IEMMODE_16BIT:
11663 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_486);
11664 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11665 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
11666 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
11667 IEM_MC_LOCAL(uint16_t, u16RegCopy);
11668 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11669 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
11670
11671 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11672 IEMOP_HLP_DONE_DECODING();
11673 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11674 IEM_MC_FETCH_GREG_U16(u16RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
11675 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
11676 IEM_MC_FETCH_EFLAGS(EFlags);
11677 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11678 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
11679 else
11680 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
11681
11682 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
11683 IEM_MC_COMMIT_EFLAGS(EFlags);
11684 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16RegCopy);
11685 IEM_MC_ADVANCE_RIP_AND_FINISH();
11686 IEM_MC_END();
11687 break;
11688
11689 case IEMMODE_32BIT:
11690 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_486);
11691 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11692 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
11693 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
11694 IEM_MC_LOCAL(uint32_t, u32RegCopy);
11695 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11696 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
11697
11698 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11699 IEMOP_HLP_DONE_DECODING();
11700 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11701 IEM_MC_FETCH_GREG_U32(u32RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
11702 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
11703 IEM_MC_FETCH_EFLAGS(EFlags);
11704 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11705 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
11706 else
11707 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
11708
11709 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
11710 IEM_MC_COMMIT_EFLAGS(EFlags);
11711 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32RegCopy);
11712 IEM_MC_ADVANCE_RIP_AND_FINISH();
11713 IEM_MC_END();
11714 break;
11715
11716 case IEMMODE_64BIT:
11717 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT);
11718 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11719 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
11720 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
11721 IEM_MC_LOCAL(uint64_t, u64RegCopy);
11722 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11723 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
11724
11725 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11726 IEMOP_HLP_DONE_DECODING();
11727 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11728 IEM_MC_FETCH_GREG_U64(u64RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
11729 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
11730 IEM_MC_FETCH_EFLAGS(EFlags);
11731 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11732 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
11733 else
11734 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
11735
11736 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
11737 IEM_MC_COMMIT_EFLAGS(EFlags);
11738 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64RegCopy);
11739 IEM_MC_ADVANCE_RIP_AND_FINISH();
11740 IEM_MC_END();
11741 break;
11742
11743 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11744 }
11745 }
11746}
11747
11748
11749/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
11750FNIEMOP_DEF(iemOp_cmpps_Vps_Wps_Ib)
11751{
11752 IEMOP_MNEMONIC3(RMI, CMPPS, cmpps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11753
11754 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11755 if (IEM_IS_MODRM_REG_MODE(bRm))
11756 {
11757 /*
11758 * XMM, XMM.
11759 */
11760 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER);
11761 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11762 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11763 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11764 IEM_MC_LOCAL(X86XMMREG, Dst);
11765 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11766 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11767 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11768 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11769 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11770 IEM_MC_PREPARE_SSE_USAGE();
11771 IEM_MC_REF_MXCSR(pfMxcsr);
11772 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11773 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11774 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpps_u128, pfMxcsr, pDst, pSrc, bImmArg);
11775 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11776 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11777 } IEM_MC_ELSE() {
11778 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11779 } IEM_MC_ENDIF();
11780
11781 IEM_MC_ADVANCE_RIP_AND_FINISH();
11782 IEM_MC_END();
11783 }
11784 else
11785 {
11786 /*
11787 * XMM, [mem128].
11788 */
11789 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER);
11790 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11791 IEM_MC_LOCAL(X86XMMREG, Dst);
11792 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11793 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11794 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11795 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11796
11797 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11798 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11799 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11800 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11801 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11802 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(Src.uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11803
11804 IEM_MC_PREPARE_SSE_USAGE();
11805 IEM_MC_REF_MXCSR(pfMxcsr);
11806 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11807 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpps_u128, pfMxcsr, pDst, pSrc, bImmArg);
11808 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11809 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11810 } IEM_MC_ELSE() {
11811 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11812 } IEM_MC_ENDIF();
11813
11814 IEM_MC_ADVANCE_RIP_AND_FINISH();
11815 IEM_MC_END();
11816 }
11817}
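
/*
 * Note: the low three immediate bits select the predicate shared by all the
 * CMPxx encodings here: 0=EQ, 1=LT, 2=LE, 3=UNORD, 4=NEQ, 5=NLT, 6=NLE,
 * 7=ORD.  Each element of the result is an all-ones or all-zeroes mask
 * (illustrative sketch for one single-precision element):
 *
 *      puDst->au32[i] = fPredicateHolds ? UINT32_MAX : 0;
 *
 * NaN inputs make EQ/LT/LE/ORD false and UNORD/NEQ/NLT/NLE true, and the
 * signalling predicates may raise an invalid-operation exception, hence the
 * IEM_MC_IF_MXCSR_XCPT_PENDING checks.
 */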
11818
11819
11820/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
11821FNIEMOP_DEF(iemOp_cmppd_Vpd_Wpd_Ib)
11822{
11823 IEMOP_MNEMONIC3(RMI, CMPPD, cmppd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11824
11825 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11826 if (IEM_IS_MODRM_REG_MODE(bRm))
11827 {
11828 /*
11829 * XMM, XMM.
11830 */
11831 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER);
11832 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11833 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11834 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11835 IEM_MC_LOCAL(X86XMMREG, Dst);
11836 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11837 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11838 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11839 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11840 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11841 IEM_MC_PREPARE_SSE_USAGE();
11842 IEM_MC_REF_MXCSR(pfMxcsr);
11843 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11844 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11845 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmppd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11846 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11847 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11848 } IEM_MC_ELSE() {
11849 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11850 } IEM_MC_ENDIF();
11851
11852 IEM_MC_ADVANCE_RIP_AND_FINISH();
11853 IEM_MC_END();
11854 }
11855 else
11856 {
11857 /*
11858 * XMM, [mem128].
11859 */
11860 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER);
11861 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11862 IEM_MC_LOCAL(X86XMMREG, Dst);
11863 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11864 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11865 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11866 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11867
11868 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11869 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11870 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11871 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11872 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11873 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(Src.uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11874
11875 IEM_MC_PREPARE_SSE_USAGE();
11876 IEM_MC_REF_MXCSR(pfMxcsr);
11877 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11878 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmppd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11879 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11880 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11881 } IEM_MC_ELSE() {
11882 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11883 } IEM_MC_ENDIF();
11884
11885 IEM_MC_ADVANCE_RIP_AND_FINISH();
11886 IEM_MC_END();
11887 }
11888}
11889
11890
11891/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
11892FNIEMOP_DEF(iemOp_cmpss_Vss_Wss_Ib)
11893{
11894 IEMOP_MNEMONIC3(RMI, CMPSS, cmpss, Vss, Wss, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11895
11896 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11897 if (IEM_IS_MODRM_REG_MODE(bRm))
11898 {
11899 /*
11900 * XMM32, XMM32.
11901 */
11902 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER);
11903 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11904 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11905 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11906 IEM_MC_LOCAL(X86XMMREG, Dst);
11907 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11908 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11909 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11910 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11911 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11912 IEM_MC_PREPARE_SSE_USAGE();
11913 IEM_MC_REF_MXCSR(pfMxcsr);
11914 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11915 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11916 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpss_u128, pfMxcsr, pDst, pSrc, bImmArg);
11917 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11918 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11919 } IEM_MC_ELSE() {
11920 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11921 } IEM_MC_ENDIF();
11922
11923 IEM_MC_ADVANCE_RIP_AND_FINISH();
11924 IEM_MC_END();
11925 }
11926 else
11927 {
11928 /*
11929 * XMM32, [mem32].
11930 */
11931 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER);
11932 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11933 IEM_MC_LOCAL(X86XMMREG, Dst);
11934 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11935 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11936 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11937 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11938
11939 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11940 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11941 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11942 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11943 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11944 IEM_MC_FETCH_MEM_XMM_U32(Src.uSrc2, 0 /*a_iDword */, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11945
11946 IEM_MC_PREPARE_SSE_USAGE();
11947 IEM_MC_REF_MXCSR(pfMxcsr);
11948 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11949 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpss_u128, pfMxcsr, pDst, pSrc, bImmArg);
11950 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11951 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11952 } IEM_MC_ELSE() {
11953 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11954 } IEM_MC_ENDIF();
11955
11956 IEM_MC_ADVANCE_RIP_AND_FINISH();
11957 IEM_MC_END();
11958 }
11959}
11960
11961
11962/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
11963FNIEMOP_DEF(iemOp_cmpsd_Vsd_Wsd_Ib)
11964{
11965 IEMOP_MNEMONIC3(RMI, CMPSD, cmpsd, Vsd, Wsd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11966
11967 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11968 if (IEM_IS_MODRM_REG_MODE(bRm))
11969 {
11970 /*
11971 * XMM64, XMM64.
11972 */
11973 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER);
11974 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11976 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11977 IEM_MC_LOCAL(X86XMMREG, Dst);
11978 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11979 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11980 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11981 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11982 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11983 IEM_MC_PREPARE_SSE_USAGE();
11984 IEM_MC_REF_MXCSR(pfMxcsr);
11985 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11986 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11987 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11988 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11989 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11990 } IEM_MC_ELSE() {
11991 IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
11992 } IEM_MC_ENDIF();
11993
11994 IEM_MC_ADVANCE_RIP_AND_FINISH();
11995 IEM_MC_END();
11996 }
11997 else
11998 {
11999 /*
12000 * XMM64, [mem64].
12001 */
12002 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER);
12003 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
12004 IEM_MC_LOCAL(X86XMMREG, Dst);
12005 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
12006 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
12007 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
12008 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12009
12010 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12011 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12012 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
12013 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12014 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12015 IEM_MC_FETCH_MEM_XMM_U64(Src.uSrc2, 0 /*a_iQword */, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12016
12017 IEM_MC_PREPARE_SSE_USAGE();
12018 IEM_MC_REF_MXCSR(pfMxcsr);
12019 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
12020 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
12021 IEM_MC_IF_MXCSR_XCPT_PENDING() {
12022 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
12023 } IEM_MC_ELSE() {
12024 IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
12025 } IEM_MC_ENDIF();
12026
12027 IEM_MC_ADVANCE_RIP_AND_FINISH();
12028 IEM_MC_END();
12029 }
12030}
12031
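/* Editor's note: the cmpps/cmppd/cmpss/cmpsd workers above all funnel the
   same imm8 predicate into the iemAImpl_cmp*_u128 helpers. A minimal sketch
   of the non-VEX predicate encoding (imm8 & 7) for one double lane, assuming
   the documented SSE2 behaviour; the function name here is hypothetical: */
#if 0 /* illustrative only, not part of the build */
# include <math.h>
# include <stdint.h>
static uint64_t cmpPredicateRefF64(double rd1, double rd2, uint8_t bImm)
{
    bool const fUnordered = isnan(rd1) || isnan(rd2);
    bool fRes;
    switch (bImm & 7)
    {
        case 0:  fRes = !fUnordered && rd1 == rd2;   break; /* EQ    (ordered)   */
        case 1:  fRes = !fUnordered && rd1 <  rd2;   break; /* LT    (ordered)   */
        case 2:  fRes = !fUnordered && rd1 <= rd2;   break; /* LE    (ordered)   */
        case 3:  fRes = fUnordered;                  break; /* UNORD             */
        case 4:  fRes = fUnordered || rd1 != rd2;    break; /* NEQ   (unordered) */
        case 5:  fRes = fUnordered || !(rd1 <  rd2); break; /* NLT   (unordered) */
        case 6:  fRes = fUnordered || !(rd1 <= rd2); break; /* NLE   (unordered) */
        default: fRes = !fUnordered;                 break; /* ORD               */
    }
    return fRes ? UINT64_MAX : 0; /* all ones on true, all zeroes on false */
}
#endif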
12032
12033/** Opcode 0x0f 0xc3. */
12034FNIEMOP_DEF(iemOp_movnti_My_Gy)
12035{
12036 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
12037
12038 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12039
12040 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
12041 if (IEM_IS_MODRM_MEM_MODE(bRm))
12042 {
12043 switch (pVCpu->iem.s.enmEffOpSize)
12044 {
12045 case IEMMODE_32BIT:
12046 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386);
12047 IEM_MC_LOCAL(uint32_t, u32Value);
12048 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12049
12050 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12051 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12052
12053 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
12054 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
12055 IEM_MC_ADVANCE_RIP_AND_FINISH();
12056 IEM_MC_END();
12057 break;
12058
12059 case IEMMODE_64BIT:
12060 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT);
12061 IEM_MC_LOCAL(uint64_t, u64Value);
12062 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12063
12064 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12065 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12066
12067 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
12068 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
12069 IEM_MC_ADVANCE_RIP_AND_FINISH();
12070 IEM_MC_END();
12071 break;
12072
12073 case IEMMODE_16BIT:
12074 /** @todo check this form. */
12075 IEMOP_RAISE_INVALID_OPCODE_RET();
12076
12077 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12078 }
12079 }
12080 else
12081 IEMOP_RAISE_INVALID_OPCODE_RET();
12082}
12083
12084
12085/* Opcode 0x66 0x0f 0xc3 - invalid */
12086/* Opcode 0xf3 0x0f 0xc3 - invalid */
12087/* Opcode 0xf2 0x0f 0xc3 - invalid */
12088
12089
12090/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
12091FNIEMOP_DEF(iemOp_pinsrw_Pq_RyMw_Ib)
12092{
12093 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Pq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
12094 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12095 if (IEM_IS_MODRM_REG_MODE(bRm))
12096 {
12097 /*
12098 * Register, register.
12099 */
12100 IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER);
12101 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12102 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
12103 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12104 IEM_MC_ARG(uint16_t, u16Src, 1);
12105 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12106 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12107 IEM_MC_PREPARE_FPU_USAGE();
12108 IEM_MC_FPU_TO_MMX_MODE();
12109 IEM_MC_REF_MREG_U64(pu64Dst, IEM_GET_MODRM_REG_8(bRm));
12110 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
12111 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u64, pu64Dst, u16Src, bImmArg);
12112 IEM_MC_MODIFIED_MREG_BY_REF(pu64Dst);
12113 IEM_MC_ADVANCE_RIP_AND_FINISH();
12114 IEM_MC_END();
12115 }
12116 else
12117 {
12118 /*
12119 * Register, memory.
12120 */
12121 IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER);
12122 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12123 IEM_MC_ARG(uint16_t, u16Src, 1);
12124 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12125
12126 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12127 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12128 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12129 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
12130 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12131 IEM_MC_PREPARE_FPU_USAGE();
12132 IEM_MC_FPU_TO_MMX_MODE();
12133
12134 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12135 IEM_MC_REF_MREG_U64(pu64Dst, IEM_GET_MODRM_REG_8(bRm));
12136 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u64, pu64Dst, u16Src, bImmArg);
12137 IEM_MC_MODIFIED_MREG_BY_REF(pu64Dst);
12138 IEM_MC_ADVANCE_RIP_AND_FINISH();
12139 IEM_MC_END();
12140 }
12141}
12142
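/* Editor's note: a minimal sketch of what iemAImpl_pinsrw_u64 is expected to
   compute (hypothetical reference, not the actual implementation): replace
   the 16-bit lane selected by imm8 & 3 and leave the other lanes alone. */
#if 0 /* illustrative only, not part of the build */
# include <stdint.h>
static void pinsrwRefU64(uint64_t *pu64Dst, uint16_t u16Src, uint8_t bImm)
{
    unsigned const cShift = (bImm & 3) * 16;
    *pu64Dst = (*pu64Dst & ~(UINT64_C(0xffff) << cShift))
             | ((uint64_t)u16Src << cShift);
    /* The xmm variant (below) is identical except the lane index is imm8 & 7. */
}
#endif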
12143
12144/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
12145FNIEMOP_DEF(iemOp_pinsrw_Vdq_RyMw_Ib)
12146{
12147 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Vq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12148 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12149 if (IEM_IS_MODRM_REG_MODE(bRm))
12150 {
12151 /*
12152 * Register, register.
12153 */
12154 IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER);
12155 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12156 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12157 IEM_MC_ARG(PRTUINT128U, puDst, 0);
12158 IEM_MC_ARG(uint16_t, u16Src, 1);
12159 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12160 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12161 IEM_MC_PREPARE_SSE_USAGE();
12162 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
12163 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12164 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u128, puDst, u16Src, bImmArg);
12165 IEM_MC_ADVANCE_RIP_AND_FINISH();
12166 IEM_MC_END();
12167 }
12168 else
12169 {
12170 /*
12171 * Register, memory.
12172 */
12173 IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER);
12174 IEM_MC_ARG(PRTUINT128U, puDst, 0);
12175 IEM_MC_ARG(uint16_t, u16Src, 1);
12176 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12177
12178 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12179 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12180 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12181 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12182 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12183 IEM_MC_PREPARE_SSE_USAGE();
12184
12185 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12186 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12187 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u128, puDst, u16Src, bImmArg);
12188 IEM_MC_ADVANCE_RIP_AND_FINISH();
12189 IEM_MC_END();
12190 }
12191}
12192
12193
12194/* Opcode 0xf3 0x0f 0xc4 - invalid */
12195/* Opcode 0xf2 0x0f 0xc4 - invalid */
12196
12197
12198/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
12199FNIEMOP_DEF(iemOp_pextrw_Gd_Nq_Ib)
12200{
12201 /*IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);*/ /** @todo */
12202 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12203 if (IEM_IS_MODRM_REG_MODE(bRm))
12204 {
12205 /*
12206 * Greg32, MMX, imm8.
12207 */
12208 IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER);
12209 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12210 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
12211 IEM_MC_LOCAL(uint16_t, u16Dst);
12212 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
12213 IEM_MC_ARG(uint64_t, u64Src, 1);
12214 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12215 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12216 IEM_MC_PREPARE_FPU_USAGE();
12217 IEM_MC_FPU_TO_MMX_MODE();
12218 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
12219 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pextrw_u64, pu16Dst, u64Src, bImmArg);
12220 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
12221 IEM_MC_ADVANCE_RIP_AND_FINISH();
12222 IEM_MC_END();
12223 }
12224 /* No memory operand. */
12225 else
12226 IEMOP_RAISE_INVALID_OPCODE_RET();
12227}
12228
12229
12230/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
12231FNIEMOP_DEF(iemOp_pextrw_Gd_Udq_Ib)
12232{
12233 IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12234 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12235 if (IEM_IS_MODRM_REG_MODE(bRm))
12236 {
12237 /*
12238 * Greg32, XMM, imm8.
12239 */
12240 IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER);
12241 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12242 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12243 IEM_MC_LOCAL(uint16_t, u16Dst);
12244 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
12245 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
12246 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12247 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12248 IEM_MC_PREPARE_SSE_USAGE();
12249 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12250 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pextrw_u128, pu16Dst, puSrc, bImmArg);
12251 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
12252 IEM_MC_ADVANCE_RIP_AND_FINISH();
12253 IEM_MC_END();
12254 }
12255 /* No memory operand. */
12256 else
12257 IEMOP_RAISE_INVALID_OPCODE_RET();
12258}
12259
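/* Editor's note: a sketch of the pextrw extraction performed by the helpers
   above (hypothetical reference): pick the word lane selected by imm8 (& 3
   for the 64-bit MMX form, & 7 for the 128-bit form) and zero-extend it,
   matching the IEM_MC_STORE_GREG_U32 of the 16-bit result. */
#if 0 /* illustrative only, not part of the build */
# include <stdint.h>
static uint32_t pextrwRefU64(uint64_t u64Src, uint8_t bImm)
{
    return (uint32_t)(uint16_t)(u64Src >> ((bImm & 3) * 16));
}
#endif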
12260
12261/* Opcode 0xf3 0x0f 0xc5 - invalid */
12262/* Opcode 0xf2 0x0f 0xc5 - invalid */
12263
12264
12265/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
12266FNIEMOP_DEF(iemOp_shufps_Vps_Wps_Ib)
12267{
12268 IEMOP_MNEMONIC3(RMI, SHUFPS, shufps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12269 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12270 if (IEM_IS_MODRM_REG_MODE(bRm))
12271 {
12272 /*
12273 * XMM, XMM, imm8.
12274 */
12275 IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER);
12276 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12277 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
12278 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12279 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
12280 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12281 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12282 IEM_MC_PREPARE_SSE_USAGE();
12283 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12284 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12285 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);
12286 IEM_MC_ADVANCE_RIP_AND_FINISH();
12287 IEM_MC_END();
12288 }
12289 else
12290 {
12291 /*
12292 * XMM, [mem128], imm8.
12293 */
12294 IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER);
12295 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12296 IEM_MC_LOCAL(RTUINT128U, uSrc);
12297 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
12298 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12299
12300 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12301 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12302 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12303 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
12304 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12305 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12306
12307 IEM_MC_PREPARE_SSE_USAGE();
12308 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12309 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);
12310
12311 IEM_MC_ADVANCE_RIP_AND_FINISH();
12312 IEM_MC_END();
12313 }
12314}
12315
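/* Editor's note: a sketch of the shuffle iemAImpl_shufps_u128 performs
   (hypothetical reference). The two low result lanes select from the
   destination, the two high lanes from the source, two imm8 bits per lane;
   both inputs are snapshotted since reg and r/m may name the same register.
   shufpd (below) is the two-lane analog with one selector bit per qword. */
#if 0 /* illustrative only, not part of the build */
# include <stdint.h>
static void shufpsRef(uint32_t au32Dst[4], uint32_t const au32Src[4], uint8_t bImm)
{
    uint32_t const au32D[4] = { au32Dst[0], au32Dst[1], au32Dst[2], au32Dst[3] };
    uint32_t const au32S[4] = { au32Src[0], au32Src[1], au32Src[2], au32Src[3] };
    au32Dst[0] = au32D[ bImm       & 3];
    au32Dst[1] = au32D[(bImm >> 2) & 3];
    au32Dst[2] = au32S[(bImm >> 4) & 3];
    au32Dst[3] = au32S[(bImm >> 6) & 3];
}
#endif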
12316
12317/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
12318FNIEMOP_DEF(iemOp_shufpd_Vpd_Wpd_Ib)
12319{
12320 IEMOP_MNEMONIC3(RMI, SHUFPD, shufpd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12321 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12322 if (IEM_IS_MODRM_REG_MODE(bRm))
12323 {
12324 /*
12325 * XMM, XMM, imm8.
12326 */
12327 IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER);
12328 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12329 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12330 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12331 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
12332 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12333 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12334 IEM_MC_PREPARE_SSE_USAGE();
12335 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12336 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12337 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);
12338 IEM_MC_ADVANCE_RIP_AND_FINISH();
12339 IEM_MC_END();
12340 }
12341 else
12342 {
12343 /*
12344 * XMM, [mem128], imm8.
12345 */
12346 IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER);
12347 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12348 IEM_MC_LOCAL(RTUINT128U, uSrc);
12349 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
12350 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12351
12352 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12353 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12354 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12355 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12356 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12357 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12358
12359 IEM_MC_PREPARE_SSE_USAGE();
12360 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12361 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);
12362
12363 IEM_MC_ADVANCE_RIP_AND_FINISH();
12364 IEM_MC_END();
12365 }
12366}
12367
12368
12369/* Opcode 0xf3 0x0f 0xc6 - invalid */
12370/* Opcode 0xf2 0x0f 0xc6 - invalid */
12371
12372
12373/** Opcode 0x0f 0xc7 !11/1. */
12374FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
12375{
12376 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
12377
12378 IEM_MC_BEGIN(4, 5, IEM_MC_F_NOT_286_OR_OLDER);
12379 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
12380 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
12381 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
12382 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
12383 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
12384 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
12385 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12386 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
12387
12388 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12389 IEMOP_HLP_DONE_DECODING_EX(fCmpXchg8b);
12390 IEM_MC_MEM_MAP_U64_RW(pu64MemDst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
12391
12392 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
12393 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
12394 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
12395
12396 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
12397 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
12398 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
12399
12400 IEM_MC_FETCH_EFLAGS(EFlags);
12401 if ( !(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)
12402 && (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
12403 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
12404 else
12405 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
12406
12407 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64MemDst, bUnmapInfo);
12408 IEM_MC_COMMIT_EFLAGS(EFlags);
12409 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
12410 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
12411 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
12412 } IEM_MC_ENDIF();
12413 IEM_MC_ADVANCE_RIP_AND_FINISH();
12414
12415 IEM_MC_END();
12416}
12417
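/* Editor's note: the architectural contract implemented above, as a plain C
   sketch (hypothetical reference; the real helpers also honour LOCK):
   compare EDX:EAX with m64, store ECX:EBX and set ZF on a match, otherwise
   clear ZF and let the old memory value flow back into EDX:EAX. */
#if 0 /* illustrative only, not part of the build */
# include <stdint.h>
static bool cmpxchg8bRef(uint64_t *pu64Mem, uint64_t *pu64EdxEax, uint64_t u64EcxEbx)
{
    if (*pu64Mem == *pu64EdxEax)
    {
        *pu64Mem = u64EcxEbx;   /* ZF=1: store ECX:EBX */
        return true;
    }
    *pu64EdxEax = *pu64Mem;     /* ZF=0: matches the IEM_MC_IF_EFL_BIT_NOT_SET path */
    return false;
}
#endif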
12418
12419/** Opcode REX.W 0x0f 0xc7 !11/1. */
12420FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
12421{
12422 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
12423 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fCmpXchg16b)
12424 {
12425 IEM_MC_BEGIN(4, 3, IEM_MC_F_64BIT);
12426 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0);
12427 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1);
12428 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2);
12429 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
12430 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx);
12431 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx);
12432 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12433
12434 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12435 IEMOP_HLP_DONE_DECODING();
12436 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16);
12437 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
12438
12439 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX);
12440 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX);
12441 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx);
12442
12443 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX);
12444 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX);
12445 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx);
12446
12447 IEM_MC_FETCH_EFLAGS(EFlags);
12448
12449#ifdef RT_ARCH_AMD64 /* some code duplication here because IEMAllInstPython.py cannot parse if/else/#if spaghetti. */
12450 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fCmpXchg16b)
12451 {
12452 if ( !(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)
12453 && (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
12454 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12455 else
12456 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12457 }
12458 else
12459 { /* (see comments in #else case below) */
12460 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
12461 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12462 else
12463 IEM_MC_CALL_CIMPL_4(IEM_CIMPL_F_STATUS_FLAGS,
12464 iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12465 }
12466
12467#elif defined(RT_ARCH_ARM64)
12468 /** @todo may require fallback for unaligned accesses... */
12469 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
12470 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12471 else
12472 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12473
12474#else
12475 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
12476    accesses that are not at all atomic, which works fine in a UNI CPU guest
12477    configuration (ignoring DMA). If guest SMP is active we have no choice but
12478    to use a rendezvous callback here. Sigh. (Rough sketch after this function.) */
12479 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
12480 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12481 else
12482 {
12483 IEM_MC_CALL_CIMPL_4(IEM_CIMPL_F_STATUS_FLAGS,
12484 iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12485 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
12486 }
12487#endif
12488
12489 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW);
12490 IEM_MC_COMMIT_EFLAGS(EFlags);
12491 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
12492 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo);
12493 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi);
12494 } IEM_MC_ENDIF();
12495 IEM_MC_ADVANCE_RIP_AND_FINISH();
12496
12497 IEM_MC_END();
12498 }
12499 Log(("cmpxchg16b -> #UD\n"));
12500 IEMOP_RAISE_INVALID_OPCODE_RET();
12501}
12502
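/* Editor's note: a rough sketch of the non-atomic fallback shape referred to
   in the comment above (hypothetical; see iemAImpl_cmpxchg16b_fallback for
   the real thing). Two 64-bit compares and stores are only safe with a
   single guest CPU; the SMP case must take the rendezvous CIMPL instead. */
#if 0 /* illustrative only, not part of the build */
# include <stdint.h>
typedef struct { uint64_t Lo, Hi; } U128SKETCH;
static bool cmpxchg16bFallbackRef(U128SKETCH *pMem, U128SKETCH *pRaxRdx, U128SKETCH const *pRbxRcx)
{
    if (pMem->Lo == pRaxRdx->Lo && pMem->Hi == pRaxRdx->Hi)
    {
        pMem->Lo = pRbxRcx->Lo; /* ZF=1: store RCX:RBX */
        pMem->Hi = pRbxRcx->Hi;
        return true;
    }
    pRaxRdx->Lo = pMem->Lo;     /* ZF=0: RDX:RAX gets the memory operand */
    pRaxRdx->Hi = pMem->Hi;
    return false;
}
#endif
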
12503FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
12504{
12505 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
12506 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
12507 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
12508}
12509
12510
12511/** Opcode 0x0f 0xc7 11/6. */
12512FNIEMOP_DEF_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm)
12513{
12514 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdRand)
12515 IEMOP_RAISE_INVALID_OPCODE_RET();
12516
12517 if (IEM_IS_MODRM_REG_MODE(bRm))
12518 {
12519 /* register destination. */
12520 switch (pVCpu->iem.s.enmEffOpSize)
12521 {
12522 case IEMMODE_16BIT:
12523 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER);
12524 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12525 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
12526 IEM_MC_ARG(uint32_t *, pEFlags, 1);
12527
12528 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12529 IEM_MC_REF_EFLAGS(pEFlags);
12530 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdRand, iemAImpl_rdrand_u16, iemAImpl_rdrand_u16_fallback),
12531 pu16Dst, pEFlags);
12532
12533 IEM_MC_ADVANCE_RIP_AND_FINISH();
12534 IEM_MC_END();
12535 break;
12536
12537 case IEMMODE_32BIT:
12538 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386);
12539 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12540 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12541 IEM_MC_ARG(uint32_t *, pEFlags, 1);
12542
12543 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12544 IEM_MC_REF_EFLAGS(pEFlags);
12545 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdRand, iemAImpl_rdrand_u32, iemAImpl_rdrand_u32_fallback),
12546 pu32Dst, pEFlags);
12547
12548 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
12549 IEM_MC_ADVANCE_RIP_AND_FINISH();
12550 IEM_MC_END();
12551 break;
12552
12553 case IEMMODE_64BIT:
12554 IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT);
12555 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12556 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12557 IEM_MC_ARG(uint32_t *, pEFlags, 1);
12558
12559 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12560 IEM_MC_REF_EFLAGS(pEFlags);
12561 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdRand, iemAImpl_rdrand_u64, iemAImpl_rdrand_u64_fallback),
12562 pu64Dst, pEFlags);
12563
12564 IEM_MC_ADVANCE_RIP_AND_FINISH();
12565 IEM_MC_END();
12566 break;
12567
12568 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12569 }
12570 }
12571 /* Register only. */
12572 else
12573 IEMOP_RAISE_INVALID_OPCODE_RET();
12574}
12575
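/* Editor's note: a minimal sketch of the RDRAND flag contract the helpers
   above implement, assuming the SDM-documented behaviour (names here are
   hypothetical): CF=1 with a valid value, CF=0 with a zeroed destination,
   and OF/SF/ZF/AF/PF always cleared. IEM_SELECT_HOST_OR_FALLBACK simply
   prefers the host instruction when the host CPU has it. */
#if 0 /* illustrative only, not part of the build */
# include <stdint.h>
static void rdrandRefU32(uint32_t *pu32Dst, uint32_t *pfEFlags, bool fReady, uint32_t uRnd)
{
    *pfEFlags &= ~(uint32_t)(X86_EFL_CF | X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    if (fReady)
    {
        *pu32Dst   = uRnd;
        *pfEFlags |= X86_EFL_CF;    /* CF=1: valid random value */
    }
    else
        *pu32Dst = 0;               /* CF=0: no entropy available */
}
#endif
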
12576/** Opcode 0x0f 0xc7 !11/6. */
12577#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12578FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
12579{
12580 IEMOP_MNEMONIC(vmptrld, "vmptrld");
12581 IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVDiag_Vmptrld);
12582 IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVDiag_Vmptrld);
12583 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER);
12584 IEM_MC_ARG(uint8_t, iEffSeg, 0);
12585 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
12586 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12587 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
12588 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
12589 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
12590 IEM_MC_END();
12591}
12592#else
12593FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
12594#endif
12595
12596/** Opcode 0x66 0x0f 0xc7 !11/6. */
12597#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12598FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
12599{
12600 IEMOP_MNEMONIC(vmclear, "vmclear");
12601 IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVDiag_Vmclear);
12602 IEMOP_HLP_VMX_INSTR("vmclear", kVmxVDiag_Vmclear);
12603 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER);
12604 IEM_MC_ARG(uint8_t, iEffSeg, 0);
12605 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
12606 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12607 IEMOP_HLP_DONE_DECODING();
12608 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
12609 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
12610 IEM_MC_END();
12611}
12612#else
12613FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
12614#endif
12615
12616/** Opcode 0xf3 0x0f 0xc7 !11/6. */
12617#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12618FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
12619{
12620 IEMOP_MNEMONIC(vmxon, "vmxon");
12621 IEMOP_HLP_VMX_INSTR("vmxon", kVmxVDiag_Vmxon);
12622 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER);
12623 IEM_MC_ARG(uint8_t, iEffSeg, 0);
12624 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
12625 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12626 IEMOP_HLP_DONE_DECODING();
12627 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
12628 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
12629 IEM_MC_END();
12630}
12631#else
12632FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
12633#endif
12634
12635/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
12636#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12637FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
12638{
12639 IEMOP_MNEMONIC(vmptrst, "vmptrst");
12640 IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVDiag_Vmptrst);
12641 IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVDiag_Vmptrst);
12642 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER);
12643 IEM_MC_ARG(uint8_t, iEffSeg, 0);
12644 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
12645 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12646 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
12647 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
12648 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
12649 IEM_MC_END();
12650}
12651#else
12652FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
12653#endif
12654
12655/** Opcode 0x0f 0xc7 11/7. */
12656FNIEMOP_DEF_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm)
12657{
12658 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdSeed)
12659 IEMOP_RAISE_INVALID_OPCODE_RET();
12660
12661 if (IEM_IS_MODRM_REG_MODE(bRm))
12662 {
12663 /* register destination. */
12664 switch (pVCpu->iem.s.enmEffOpSize)
12665 {
12666 case IEMMODE_16BIT:
12667 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER);
12668 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12669 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
12670 IEM_MC_ARG(uint32_t *, pEFlags, 1);
12671
12672 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12673 IEM_MC_REF_EFLAGS(pEFlags);
12674 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdSeed, iemAImpl_rdseed_u16, iemAImpl_rdseed_u16_fallback),
12675 pu16Dst, pEFlags);
12676
12677 IEM_MC_ADVANCE_RIP_AND_FINISH();
12678 IEM_MC_END();
12679 break;
12680
12681 case IEMMODE_32BIT:
12682 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386);
12683 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12684 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12685 IEM_MC_ARG(uint32_t *, pEFlags, 1);
12686
12687 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12688 IEM_MC_REF_EFLAGS(pEFlags);
12689 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdSeed, iemAImpl_rdseed_u32, iemAImpl_rdseed_u32_fallback),
12690 pu32Dst, pEFlags);
12691
12692 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
12693 IEM_MC_ADVANCE_RIP_AND_FINISH();
12694 IEM_MC_END();
12695 break;
12696
12697 case IEMMODE_64BIT:
12698 IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT);
12699 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12700 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12701 IEM_MC_ARG(uint32_t *, pEFlags, 1);
12702
12703 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12704 IEM_MC_REF_EFLAGS(pEFlags);
12705 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fRdSeed, iemAImpl_rdseed_u64, iemAImpl_rdseed_u64_fallback),
12706 pu64Dst, pEFlags);
12707
12708 IEM_MC_ADVANCE_RIP_AND_FINISH();
12709 IEM_MC_END();
12710 break;
12711
12712 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12713 }
12714 }
12715 /* Register only. */
12716 else
12717 IEMOP_RAISE_INVALID_OPCODE_RET();
12718}
12719
12720/**
12721 * Group 9 jump table for register variant.
12722 */
12723IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
12724{ /* pfx: none, 066h, 0f3h, 0f2h */
12725 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12726 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
12727 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12728 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12729 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12730 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12731 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12732 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12733};
12734AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
12735
12736
12737/**
12738 * Group 9 jump table for memory variant.
12739 */
12740IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
12741{ /* pfx: none, 066h, 0f3h, 0f2h */
12742 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12743 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
12744 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12745 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12746 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12747 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12748 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
12749 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12750};
12751AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
12752
12753
12754/** Opcode 0x0f 0xc7. */
12755FNIEMOP_DEF(iemOp_Grp9)
12756{
12757 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12758 if (IEM_IS_MODRM_REG_MODE(bRm))
12759 /* register, register */
12760 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12761 + pVCpu->iem.s.idxPrefix], bRm);
12762 /* memory, register */
12763 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12764 + pVCpu->iem.s.idxPrefix], bRm);
12765}
12766
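/* Editor's note: the tables above are indexed as reg * 4 + prefix, where the
   prefix column order is none, 0x66, 0xF3, 0xF2 (pVCpu->iem.s.idxPrefix).
   A sketch of the computation done by iemOp_Grp9 above: */
#if 0 /* illustrative only, not part of the build */
# include <stdint.h>
static unsigned grp9TableIndex(uint8_t bRm, unsigned idxPrefix)
{
    return ((bRm >> 3) & 7) * 4 + idxPrefix; /* rows of 4, one column per prefix */
}
/* E.g. modrm 0xF6 (mod=11, reg=6) with no prefix yields index 24, i.e. the
   iemOp_Grp9_rdrand_Rv entry of g_apfnGroup9RegReg. */
#endif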
12767
12768/**
12769 * Common 'bswap register' helper.
12770 */
12771FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
12772{
12773 switch (pVCpu->iem.s.enmEffOpSize)
12774 {
12775 case IEMMODE_16BIT:
12776 IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_486);
12777 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12778 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12779 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
12780 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
12781 IEM_MC_ADVANCE_RIP_AND_FINISH();
12782 IEM_MC_END();
12783 break;
12784
12785 case IEMMODE_32BIT:
12786 IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_486);
12787 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12788 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12789 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
12790 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
12791 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
12792 IEM_MC_ADVANCE_RIP_AND_FINISH();
12793 IEM_MC_END();
12794 break;
12795
12796 case IEMMODE_64BIT:
12797 IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT);
12798 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12799 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12800 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
12801 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
12802 IEM_MC_ADVANCE_RIP_AND_FINISH();
12803 IEM_MC_END();
12804 break;
12805
12806 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12807 }
12808}
12809
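/* Editor's note: a reference byte swap for the 32-bit case (hypothetical
   sketch of what iemAImpl_bswap_u32 computes). The 16-bit operand-size form
   is documented as undefined on real CPUs, which is why the helper above
   takes a 32-bit register reference and leaves the high dword alone. */
#if 0 /* illustrative only, not part of the build */
# include <stdint.h>
static uint32_t bswapRefU32(uint32_t u)
{
    return  (u >> 24)
         | ((u >>  8) & UINT32_C(0x0000ff00))
         | ((u <<  8) & UINT32_C(0x00ff0000))
         |  (u << 24);
}
#endif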
12810
12811/** Opcode 0x0f 0xc8. */
12812FNIEMOP_DEF(iemOp_bswap_rAX_r8)
12813{
12814 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
12815 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
12816 prefix. In practice, however, REX.B appears to be the correct prefix. For a
12817 parallel case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
12818 IEMOP_HLP_MIN_486();
12819 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
12820}
12821
12822
12823/** Opcode 0x0f 0xc9. */
12824FNIEMOP_DEF(iemOp_bswap_rCX_r9)
12825{
12826 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
12827 IEMOP_HLP_MIN_486();
12828 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
12829}
12830
12831
12832/** Opcode 0x0f 0xca. */
12833FNIEMOP_DEF(iemOp_bswap_rDX_r10)
12834{
12835 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
12836 IEMOP_HLP_MIN_486();
12837 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
12838}
12839
12840
12841/** Opcode 0x0f 0xcb. */
12842FNIEMOP_DEF(iemOp_bswap_rBX_r11)
12843{
12844 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
12845 IEMOP_HLP_MIN_486();
12846 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
12847}
12848
12849
12850/** Opcode 0x0f 0xcc. */
12851FNIEMOP_DEF(iemOp_bswap_rSP_r12)
12852{
12853 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
12854 IEMOP_HLP_MIN_486();
12855 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
12856}
12857
12858
12859/** Opcode 0x0f 0xcd. */
12860FNIEMOP_DEF(iemOp_bswap_rBP_r13)
12861{
12862 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
12863 IEMOP_HLP_MIN_486();
12864 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
12865}
12866
12867
12868/** Opcode 0x0f 0xce. */
12869FNIEMOP_DEF(iemOp_bswap_rSI_r14)
12870{
12871 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
12872 IEMOP_HLP_MIN_486();
12873 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
12874}
12875
12876
12877/** Opcode 0x0f 0xcf. */
12878FNIEMOP_DEF(iemOp_bswap_rDI_r15)
12879{
12880 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
12881 IEMOP_HLP_MIN_486();
12882 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
12883}
12884
12885
12886/* Opcode 0x0f 0xd0 - invalid */
12887
12888
12889/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
12890FNIEMOP_DEF(iemOp_addsubpd_Vpd_Wpd)
12891{
12892 IEMOP_MNEMONIC2(RM, ADDSUBPD, addsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12893 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubpd_u128);
12894}
12895
12896
12897/* Opcode 0xf3 0x0f 0xd0 - invalid */
12898
12899
12900/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
12901FNIEMOP_DEF(iemOp_addsubps_Vps_Wps)
12902{
12903 IEMOP_MNEMONIC2(RM, ADDSUBPS, addsubps, Vps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12904 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubps_u128);
12905}
12906
12907
12908
12909/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
12910FNIEMOP_DEF(iemOp_psrlw_Pq_Qq)
12911{
12912 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
12913 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlw_u64);
12914}
12915
12916/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, Wx */
12917FNIEMOP_DEF(iemOp_psrlw_Vx_Wx)
12918{
12919 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12920 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlw_u128);
12921}
12922
12923/* Opcode 0xf3 0x0f 0xd1 - invalid */
12924/* Opcode 0xf2 0x0f 0xd1 - invalid */
12925
12926/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
12927FNIEMOP_DEF(iemOp_psrld_Pq_Qq)
12928{
12929 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
12930 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrld_u64);
12931}
12932
12933
12934/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
12935FNIEMOP_DEF(iemOp_psrld_Vx_Wx)
12936{
12937 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12938 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrld_u128);
12939}
12940
12941
12942/* Opcode 0xf3 0x0f 0xd2 - invalid */
12943/* Opcode 0xf2 0x0f 0xd2 - invalid */
12944
12945/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
12946FNIEMOP_DEF(iemOp_psrlq_Pq_Qq)
12947{
12948 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
12949 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlq_u64);
12950}
12951
12952
12953/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
12954FNIEMOP_DEF(iemOp_psrlq_Vx_Wx)
12955{
12956 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12957 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlq_u128);
12958}
12959
12960
12961/* Opcode 0xf3 0x0f 0xd3 - invalid */
12962/* Opcode 0xf2 0x0f 0xd3 - invalid */
12963
12964
12965/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
12966FNIEMOP_DEF(iemOp_paddq_Pq_Qq)
12967{
12968 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12969 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full_Sse2, iemAImpl_paddq_u64);
12970}
12971
12972
12973/** Opcode 0x66 0x0f 0xd4 - paddq Vx, Wx */
12974FNIEMOP_DEF(iemOp_paddq_Vx_Wx)
12975{
12976 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12977 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddq_u128);
12978}
12979
12980
12981/* Opcode 0xf3 0x0f 0xd4 - invalid */
12982/* Opcode 0xf2 0x0f 0xd4 - invalid */
12983
12984/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
12985FNIEMOP_DEF(iemOp_pmullw_Pq_Qq)
12986{
12987 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12988 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmullw_u64);
12989}
12990
12991/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
12992FNIEMOP_DEF(iemOp_pmullw_Vx_Wx)
12993{
12994 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12995 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmullw_u128);
12996}
12997
12998
12999/* Opcode 0xf3 0x0f 0xd5 - invalid */
13000/* Opcode 0xf2 0x0f 0xd5 - invalid */
13001
13002/* Opcode 0x0f 0xd6 - invalid */
13003
13004/**
13005 * @opcode 0xd6
13006 * @oppfx 0x66
13007 * @opcpuid sse2
13008 * @opgroup og_sse2_pcksclr_datamove
13009 * @opxcpttype none
13010 * @optest op1=-1 op2=2 -> op1=2
13011 * @optest op1=0 op2=-42 -> op1=-42
13012 */
13013FNIEMOP_DEF(iemOp_movq_Wq_Vq)
13014{
13015 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13016 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13017 if (IEM_IS_MODRM_REG_MODE(bRm))
13018 {
13019 /*
13020 * Register, register.
13021 */
13022 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER);
13023 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13024 IEM_MC_LOCAL(uint64_t, uSrc);
13025
13026 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13027 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
13028
13029 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
13030 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
13031
13032 IEM_MC_ADVANCE_RIP_AND_FINISH();
13033 IEM_MC_END();
13034 }
13035 else
13036 {
13037 /*
13038 * Memory, register.
13039 */
13040 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER);
13041 IEM_MC_LOCAL(uint64_t, uSrc);
13042 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13043
13044 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13045 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13046 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13047 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
13048
13049 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
13050 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13051
13052 IEM_MC_ADVANCE_RIP_AND_FINISH();
13053 IEM_MC_END();
13054 }
13055}
13056
13057
13058/**
13059 * @opcode 0xd6
13060 * @opcodesub 11 mr/reg
13061 * @oppfx f3
13062 * @opcpuid sse2
13063 * @opgroup og_sse2_simdint_datamove
13064 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
13065 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
13066 */
13067FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
13068{
13069 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13070 if (IEM_IS_MODRM_REG_MODE(bRm))
13071 {
13072 /*
13073 * Register, register.
13074 */
13075 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13076 IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER);
13077 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13078 IEM_MC_LOCAL(uint64_t, uSrc);
13079
13080 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13081 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13082 IEM_MC_FPU_TO_MMX_MODE();
13083
13084 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_RM_8(bRm));
13085 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
13086
13087 IEM_MC_ADVANCE_RIP_AND_FINISH();
13088 IEM_MC_END();
13089 }
13090
13091 /**
13092 * @opdone
13093 * @opmnemonic udf30fd6mem
13094 * @opcode 0xd6
13095 * @opcodesub !11 mr/reg
13096 * @oppfx f3
13097 * @opunused intel-modrm
13098 * @opcpuid sse
13099 * @optest ->
13100 */
13101 else
13102 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
13103}
13104
13105
13106/**
13107 * @opcode 0xd6
13108 * @opcodesub 11 mr/reg
13109 * @oppfx f2
13110 * @opcpuid sse2
13111 * @opgroup og_sse2_simdint_datamove
13112 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
13113 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
13114 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
13115 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
13116 * @optest op1=-42 op2=0xfedcba9876543210
13117 * -> op1=0xfedcba9876543210 ftw=0xff
13118 */
13119FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
13120{
13121 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13122 if (IEM_IS_MODRM_REG_MODE(bRm))
13123 {
13124 /*
13125 * Register, register.
13126 */
13127 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13128 IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER);
13129 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13130 IEM_MC_LOCAL(uint64_t, uSrc);
13131
13132 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13133 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13134 IEM_MC_FPU_TO_MMX_MODE();
13135
13136 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
13137 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), uSrc);
13138
13139 IEM_MC_ADVANCE_RIP_AND_FINISH();
13140 IEM_MC_END();
13141 }
13142
13143 /**
13144 * @opdone
13145 * @opmnemonic udf20fd6mem
13146 * @opcode 0xd6
13147 * @opcodesub !11 mr/reg
13148 * @oppfx f2
13149 * @opunused intel-modrm
13150 * @opcpuid sse
13151 * @optest ->
13152 */
13153 else
13154 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
13155}
13156
13157
13158/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
13159FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
13160{
13161 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13162 /* Docs say register only. */
13163 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
13164 {
13165 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
13166 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Nq, DISOPTYPE_X86_MMX | DISOPTYPE_HARMLESS, 0);
13167 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER);
13168 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
13169 IEM_MC_ARG(uint64_t *, puDst, 0);
13170 IEM_MC_ARG(uint64_t const *, puSrc, 1);
13171 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
13172 IEM_MC_PREPARE_FPU_USAGE();
13173 IEM_MC_FPU_TO_MMX_MODE();
13174
13175 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
13176 IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
13177 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u64, puDst, puSrc);
13178
13179 IEM_MC_ADVANCE_RIP_AND_FINISH();
13180 IEM_MC_END();
13181 }
13182 else
13183 IEMOP_RAISE_INVALID_OPCODE_RET();
13184}
13185
13186
13187 /** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
13188FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
13189{
13190 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13191 /* Docs say register only. */
13192 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
13193 {
13194 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
13195 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Ux, DISOPTYPE_X86_SSE | DISOPTYPE_HARMLESS, 0);
13196 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER);
13197 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13198 IEM_MC_ARG(uint64_t *, puDst, 0);
13199 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
13200 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13201 IEM_MC_PREPARE_SSE_USAGE();
13202 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
13203 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
13204 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
13205 IEM_MC_ADVANCE_RIP_AND_FINISH();
13206 IEM_MC_END();
13207 }
13208 else
13209 IEMOP_RAISE_INVALID_OPCODE_RET();
13210}
13211
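/* Editor's note: a sketch of the sign-bit gathering both pmovmskb helpers
   perform (hypothetical reference for the 64-bit MMX form; the 128-bit form
   does the same over 16 bytes). */
#if 0 /* illustrative only, not part of the build */
# include <stdint.h>
static uint64_t pmovmskbRefU64(uint64_t uSrc)
{
    uint64_t fMask = 0;
    for (unsigned iByte = 0; iByte < 8; iByte++)
        fMask |= ((uSrc >> (iByte * 8 + 7)) & 1) << iByte;
    return fMask; /* only the low 8 bits can be set; the rest of the GREG is zeroed */
}
#endif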
13212
13213/* Opcode 0xf3 0x0f 0xd7 - invalid */
13214/* Opcode 0xf2 0x0f 0xd7 - invalid */
13215
13216
13217/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
13218FNIEMOP_DEF(iemOp_psubusb_Pq_Qq)
13219{
13220 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13221 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusb_u64);
13222}
13223
13224
13225/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx */
13226FNIEMOP_DEF(iemOp_psubusb_Vx_Wx)
13227{
13228 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13229 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusb_u128);
13230}
13231
13232
13233/* Opcode 0xf3 0x0f 0xd8 - invalid */
13234/* Opcode 0xf2 0x0f 0xd8 - invalid */
13235
13236/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
13237FNIEMOP_DEF(iemOp_psubusw_Pq_Qq)
13238{
13239 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13240 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusw_u64);
13241}
13242
13243
13244/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
13245FNIEMOP_DEF(iemOp_psubusw_Vx_Wx)
13246{
13247 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13248 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusw_u128);
13249}
13250
13251
13252/* Opcode 0xf3 0x0f 0xd9 - invalid */
13253/* Opcode 0xf2 0x0f 0xd9 - invalid */
13254
13255/** Opcode 0x0f 0xda - pminub Pq, Qq */
13256FNIEMOP_DEF(iemOp_pminub_Pq_Qq)
13257{
13258 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13259 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminub_u64);
13260}
13261
13262
13263/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
13264FNIEMOP_DEF(iemOp_pminub_Vx_Wx)
13265{
13266 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13267 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminub_u128);
13268}
13269
13270/* Opcode 0xf3 0x0f 0xda - invalid */
13271/* Opcode 0xf2 0x0f 0xda - invalid */
13272
13273/** Opcode 0x0f 0xdb - pand Pq, Qq */
13274FNIEMOP_DEF(iemOp_pand_Pq_Qq)
13275{
13276 IEMOP_MNEMONIC2(RM, PAND, pand, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13277 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pand_u64);
13278}
13279
13280
13281/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
13282FNIEMOP_DEF(iemOp_pand_Vx_Wx)
13283{
13284 IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13285 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
13286}
13287
13288
13289/* Opcode 0xf3 0x0f 0xdb - invalid */
13290/* Opcode 0xf2 0x0f 0xdb - invalid */
13291
13292/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
13293FNIEMOP_DEF(iemOp_paddusb_Pq_Qq)
13294{
13295 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13296 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusb_u64);
13297}
13298
13299
13300/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
13301FNIEMOP_DEF(iemOp_paddusb_Vx_Wx)
13302{
13303 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13304 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusb_u128);
13305}
13306
13307
13308/* Opcode 0xf3 0x0f 0xdc - invalid */
13309/* Opcode 0xf2 0x0f 0xdc - invalid */
13310
13311/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
13312FNIEMOP_DEF(iemOp_paddusw_Pq_Qq)
13313{
13314 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13315 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusw_u64);
13316}
13317
13318
13319/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
13320FNIEMOP_DEF(iemOp_paddusw_Vx_Wx)
13321{
13322 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13323 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusw_u128);
13324}
13325
13326
13327/* Opcode 0xf3 0x0f 0xdd - invalid */
13328/* Opcode 0xf2 0x0f 0xdd - invalid */
13329
13330/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
13331FNIEMOP_DEF(iemOp_pmaxub_Pq_Qq)
13332{
13333 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13334 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxub_u64);
13335}
13336
13337
13338 /** Opcode 0x66 0x0f 0xde - pmaxub Vx, Wx */
13339FNIEMOP_DEF(iemOp_pmaxub_Vx_Wx)
13340{
13341 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13342 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxub_u128);
13343}
13344
13345/* Opcode 0xf3 0x0f 0xde - invalid */
13346/* Opcode 0xf2 0x0f 0xde - invalid */
13347
13348
13349/** Opcode 0x0f 0xdf - pandn Pq, Qq */
13350FNIEMOP_DEF(iemOp_pandn_Pq_Qq)
13351{
13352 IEMOP_MNEMONIC2(RM, PANDN, pandn, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13353 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pandn_u64);
13354}
13355
13356
13357/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
13358FNIEMOP_DEF(iemOp_pandn_Vx_Wx)
13359{
13360 IEMOP_MNEMONIC2(RM, PANDN, pandn, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13361 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
13362}


/* Opcode 0xf3 0x0f 0xdf - invalid */
/* Opcode 0xf2 0x0f 0xdf - invalid */

/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
FNIEMOP_DEF(iemOp_pavgb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgb_u64);
}


/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
FNIEMOP_DEF(iemOp_pavgb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgb_u128);
}


/* Opcode 0xf3 0x0f 0xe0 - invalid */
/* Opcode 0xf2 0x0f 0xe0 - invalid */

/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
FNIEMOP_DEF(iemOp_psraw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSRAW, psraw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psraw_u64);
}


/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
FNIEMOP_DEF(iemOp_psraw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSRAW, psraw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psraw_u128);
}


/* Opcode 0xf3 0x0f 0xe1 - invalid */
/* Opcode 0xf2 0x0f 0xe1 - invalid */

/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
FNIEMOP_DEF(iemOp_psrad_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSRAD, psrad, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrad_u64);
}


/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
FNIEMOP_DEF(iemOp_psrad_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSRAD, psrad, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrad_u128);
}


/* Opcode 0xf3 0x0f 0xe2 - invalid */
/* Opcode 0xf2 0x0f 0xe2 - invalid */

/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
FNIEMOP_DEF(iemOp_pavgw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgw_u64);
}


/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
FNIEMOP_DEF(iemOp_pavgw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgw_u128);
}


/* Opcode 0xf3 0x0f 0xe3 - invalid */
/* Opcode 0xf2 0x0f 0xe3 - invalid */

/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
FNIEMOP_DEF(iemOp_pmulhuw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmulhuw_u64);
}


/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
FNIEMOP_DEF(iemOp_pmulhuw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhuw_u128);
}


/* Opcode 0xf3 0x0f 0xe4 - invalid */
/* Opcode 0xf2 0x0f 0xe4 - invalid */

/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
FNIEMOP_DEF(iemOp_pmulhw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmulhw_u64);
}


/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
FNIEMOP_DEF(iemOp_pmulhw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmulhw_u128);
}


/* Opcode 0xf3 0x0f 0xe5 - invalid */
/* Opcode 0xf2 0x0f 0xe5 - invalid */
/* Opcode 0x0f 0xe6 - invalid */


/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
FNIEMOP_DEF(iemOp_cvttpd2dq_Vx_Wpd)
{
    IEMOP_MNEMONIC2(RM, CVTTPD2DQ, cvttpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttpd2dq_u128);
}


/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
FNIEMOP_DEF(iemOp_cvtdq2pd_Vx_Wpd)
{
    IEMOP_MNEMONIC2(RM, CVTDQ2PD, cvtdq2pd, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2pd_u128);
}


/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
FNIEMOP_DEF(iemOp_cvtpd2dq_Vx_Wpd)
{
    IEMOP_MNEMONIC2(RM, CVTPD2DQ, cvtpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2dq_u128);
}
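
/* Note: all three 0x0f 0xe6 forms convert between packed doubles and packed
   dwords, differing only in direction and rounding: cvttpd2dq truncates,
   cvtpd2dq rounds according to MXCSR.RC, and cvtdq2pd widens (exactly). */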


/**
 * @opcode      0xe7
 * @opcodesub   !11 mr/reg
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse1_cachect
 * @opxcpttype  none
 * @optest      op1=-1 op2=2 -> op1=2 ftw=0xff
 * @optest      op1=0 op2=-42 -> op1=-42 ftw=0xff
 */
FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
{
    IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /* Register, memory. */
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /**
     * @opdone
     * @opmnemonic  ud0fe7reg
     * @opcode      0xe7
     * @opcodesub   11 mr/reg
     * @oppfx       none
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
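
/* Note: movntq is a non-temporal store hint; since IEM has no cache-bypassing
   path to model, the hint is simply dropped and an ordinary 64-bit store of
   the MMX register is performed. */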

/**
 * @opcode      0xe7
 * @opcodesub   !11 mr/reg
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_cachect
 * @opxcpttype  1
 * @optest      op1=-1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
{
    IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /* Register, memory. */
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic  ud660fe7reg
     * @opcode      0xe7
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}
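
/* Note: in contrast to movntq above, movntdq stores via
   IEM_MC_STORE_MEM_U128_ALIGN_SSE and therefore faults on a misaligned
   16 byte operand just like any other aligned SSE store. */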

/* Opcode 0xf3 0x0f 0xe7 - invalid */
/* Opcode 0xf2 0x0f 0xe7 - invalid */


/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
FNIEMOP_DEF(iemOp_psubsb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsb_u64);
}


/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
FNIEMOP_DEF(iemOp_psubsb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsb_u128);
}


/* Opcode 0xf3 0x0f 0xe8 - invalid */
/* Opcode 0xf2 0x0f 0xe8 - invalid */

/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
FNIEMOP_DEF(iemOp_psubsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsw_u64);
}


/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
FNIEMOP_DEF(iemOp_psubsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsw_u128);
}


/* Opcode 0xf3 0x0f 0xe9 - invalid */
/* Opcode 0xf2 0x0f 0xe9 - invalid */


/** Opcode 0x0f 0xea - pminsw Pq, Qq */
FNIEMOP_DEF(iemOp_pminsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminsw_u64);
}


/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
FNIEMOP_DEF(iemOp_pminsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminsw_u128);
}


/* Opcode 0xf3 0x0f 0xea - invalid */
/* Opcode 0xf2 0x0f 0xea - invalid */


/** Opcode 0x0f 0xeb - por Pq, Qq */
FNIEMOP_DEF(iemOp_por_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, POR, por, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_por_u64);
}


/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
FNIEMOP_DEF(iemOp_por_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, POR, por, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
}


/* Opcode 0xf3 0x0f 0xeb - invalid */
/* Opcode 0xf2 0x0f 0xeb - invalid */

/** Opcode 0x0f 0xec - paddsb Pq, Qq */
FNIEMOP_DEF(iemOp_paddsb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsb_u64);
}


/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
FNIEMOP_DEF(iemOp_paddsb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsb_u128);
}


/* Opcode 0xf3 0x0f 0xec - invalid */
/* Opcode 0xf2 0x0f 0xec - invalid */

/** Opcode 0x0f 0xed - paddsw Pq, Qq */
FNIEMOP_DEF(iemOp_paddsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsw_u64);
}


/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
FNIEMOP_DEF(iemOp_paddsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsw_u128);
}


/* Opcode 0xf3 0x0f 0xed - invalid */
/* Opcode 0xf2 0x0f 0xed - invalid */


/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
FNIEMOP_DEF(iemOp_pmaxsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxsw_u64);
}


/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
FNIEMOP_DEF(iemOp_pmaxsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxsw_u128);
}


/* Opcode 0xf3 0x0f 0xee - invalid */
/* Opcode 0xf2 0x0f 0xee - invalid */


/** Opcode 0x0f 0xef - pxor Pq, Qq */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pxor_u64);
}


/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PXOR, pxor, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
}


/* Opcode 0xf3 0x0f 0xef - invalid */
/* Opcode 0xf2 0x0f 0xef - invalid */

/* Opcode 0x0f 0xf0 - invalid */
/* Opcode 0x66 0x0f 0xf0 - invalid */


/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
FNIEMOP_DEF(iemOp_lddqu_Vx_Mx)
{
    IEMOP_MNEMONIC2(RM_MEM, LDDQU, lddqu, Vdq_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register - (not implemented, assuming it raises \#UD).
         */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
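
/* Note: lddqu has no alignment requirement, which is why the memory path
   uses the plain IEM_MC_FETCH_MEM_U128 rather than the _ALIGN_SSE variant. */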


/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
FNIEMOP_DEF(iemOp_psllw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSLLW, psllw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllw_u64);
}


/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
FNIEMOP_DEF(iemOp_psllw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSLLW, psllw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllw_u128);
}


/* Opcode 0xf2 0x0f 0xf1 - invalid */

/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
FNIEMOP_DEF(iemOp_pslld_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSLLD, pslld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pslld_u64);
}


/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
FNIEMOP_DEF(iemOp_pslld_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSLLD, pslld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pslld_u128);
}


/* Opcode 0xf2 0x0f 0xf2 - invalid */

/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
FNIEMOP_DEF(iemOp_psllq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllq_u64);
}


/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
FNIEMOP_DEF(iemOp_psllq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllq_u128);
}

/* Opcode 0xf2 0x0f 0xf3 - invalid */

/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
FNIEMOP_DEF(iemOp_pmuludq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmuludq_u64);
}


/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
FNIEMOP_DEF(iemOp_pmuludq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmuludq_u128);
}
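
/* Note: pmuludq multiplies the even 32-bit lanes into full 64-bit products;
   even the MMX encoding was only introduced with SSE2, which is what the
   DISOPTYPE_X86_SSE flag on the Pq, Qq form above reflects. */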


/* Opcode 0xf2 0x0f 0xf4 - invalid */

/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
FNIEMOP_DEF(iemOp_pmaddwd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmaddwd_u64);
}


/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
FNIEMOP_DEF(iemOp_pmaddwd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaddwd_u128);
}

/* Opcode 0xf2 0x0f 0xf5 - invalid */

/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
FNIEMOP_DEF(iemOp_psadbw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_psadbw_u64);
}


/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
FNIEMOP_DEF(iemOp_psadbw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psadbw_u128);
}


/* Opcode 0xf2 0x0f 0xf6 - invalid */

/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
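/* Note: the masked-store forms above are still FNIEMOP_STUBs at this point;
   the stub macro provides a placeholder decoder body until a proper
   byte-masked store implementation is added. */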
/* Opcode 0xf2 0x0f 0xf7 - invalid */


/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
FNIEMOP_DEF(iemOp_psubb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBB, psubb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubb_u64);
}


/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
FNIEMOP_DEF(iemOp_psubb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBB, psubb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubb_u128);
}


/* Opcode 0xf2 0x0f 0xf8 - invalid */


/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
FNIEMOP_DEF(iemOp_psubw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBW, psubw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubw_u64);
}


/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
FNIEMOP_DEF(iemOp_psubw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBW, psubw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubw_u128);
}


/* Opcode 0xf2 0x0f 0xf9 - invalid */


/** Opcode 0x0f 0xfa - psubd Pq, Qq */
FNIEMOP_DEF(iemOp_psubd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBD, psubd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubd_u64);
}


/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
FNIEMOP_DEF(iemOp_psubd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBD, psubd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubd_u128);
}


/* Opcode 0xf2 0x0f 0xfa - invalid */


/** Opcode 0x0f 0xfb - psubq Pq, Qq */
FNIEMOP_DEF(iemOp_psubq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full_Sse2, iemAImpl_psubq_u64);
}
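
/* Note: psubq is the odd one out in this row: its MMX encoding was added by
   SSE2, so the Pq, Qq form goes through the _Sse2 worker variant, which gates
   on the SSE2 feature bit rather than plain MMX. */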


/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
FNIEMOP_DEF(iemOp_psubq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubq_u128);
}


/* Opcode 0xf2 0x0f 0xfb - invalid */


/** Opcode 0x0f 0xfc - paddb Pq, Qq */
FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDB, paddb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddb_u64);
}


/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
FNIEMOP_DEF(iemOp_paddb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDB, paddb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddb_u128);
}


/* Opcode 0xf2 0x0f 0xfc - invalid */


/** Opcode 0x0f 0xfd - paddw Pq, Qq */
FNIEMOP_DEF(iemOp_paddw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDW, paddw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddw_u64);
}


/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
FNIEMOP_DEF(iemOp_paddw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDW, paddw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddw_u128);
}


/* Opcode 0xf2 0x0f 0xfd - invalid */


/** Opcode 0x0f 0xfe - paddd Pq, Qq */
FNIEMOP_DEF(iemOp_paddd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDD, paddd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddd_u64);
}


/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
FNIEMOP_DEF(iemOp_paddd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDD, paddd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddd_u128);
}


/* Opcode 0xf2 0x0f 0xfe - invalid */


/** Opcode **** 0x0f 0xff - UD0 */
FNIEMOP_DEF(iemOp_ud0)
{
    IEMOP_MNEMONIC(ud0, "ud0");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
            IEM_OPCODE_SKIP_RM_EFF_ADDR_BYTES(bRm);
    }
    IEMOP_HLP_DONE_DECODING();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
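
/* Note: Intel CPUs consume the ModR/M byte (and any effective address bytes)
   when decoding UD0 while AMD CPUs do not, hence the vendor check above to
   get the instruction length right. */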



/**
 * Two byte opcode map, first byte 0x0f.
 *
 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
 *          check if it needs updating as well when making changes.
 */
const PFNIEMOP g_apfnTwoByteMap[] =
{
    /* no prefix, 066h prefix, f3h prefix, f2h prefix */
    /* 0x00 */ IEMOP_X4(iemOp_Grp6),
    /* 0x01 */ IEMOP_X4(iemOp_Grp7),
    /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
    /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
    /* 0x04 */ IEMOP_X4(iemOp_Invalid),
    /* 0x05 */ IEMOP_X4(iemOp_syscall),
    /* 0x06 */ IEMOP_X4(iemOp_clts),
    /* 0x07 */ IEMOP_X4(iemOp_sysret),
    /* 0x08 */ IEMOP_X4(iemOp_invd),
    /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
    /* 0x0a */ IEMOP_X4(iemOp_Invalid),
    /* 0x0b */ IEMOP_X4(iemOp_ud2),
    /* 0x0c */ IEMOP_X4(iemOp_Invalid),
    /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
    /* 0x0e */ IEMOP_X4(iemOp_femms),
    /* 0x0f */ IEMOP_X4(iemOp_3Dnow),

    /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
    /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
    /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
    /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
    /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
    /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),

    /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
    /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
    /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
    /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
    /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
    /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
    /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
    /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
    /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
    /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
    /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
    /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
    /* 0x34 */ IEMOP_X4(iemOp_sysenter),
    /* 0x35 */ IEMOP_X4(iemOp_sysexit),
    /* 0x36 */ IEMOP_X4(iemOp_Invalid),
    /* 0x37 */ IEMOP_X4(iemOp_getsec),
    /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
    /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
    /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),

    /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
    /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
    /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
    /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
    /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
    /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
    /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
    /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
    /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
    /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
    /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
    /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
    /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
    /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
    /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
    /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),

    /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
    /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
    /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
    /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
    /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
    /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
    /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
    /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
    /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,

    /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x68 */ iemOp_punpckhbw_Pq_Qq, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x69 */ iemOp_punpckhwd_Pq_Qq, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6a */ iemOp_punpckhdq_Pq_Qq, iemOp_punpckhdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,

    /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
    /* 0x71 */ IEMOP_X4(iemOp_Grp12),
    /* 0x72 */ IEMOP_X4(iemOp_Grp13),
    /* 0x73 */ IEMOP_X4(iemOp_Grp14),
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
    /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
    /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
    /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,

    /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
    /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
    /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
    /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
    /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
    /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
    /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
    /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
    /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
    /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
    /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
    /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
    /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
    /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
    /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
    /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),

    /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
    /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
    /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
    /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
    /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
    /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
    /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
    /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
    /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
    /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
    /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
    /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
    /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
    /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
    /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
    /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),

    /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
    /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
    /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
    /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
    /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
    /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
    /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
    /* 0xaa */ IEMOP_X4(iemOp_rsm),
    /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
    /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
    /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
    /* 0xae */ IEMOP_X4(iemOp_Grp15),
    /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),

    /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
    /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
    /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
    /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
    /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
    /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
    /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
    /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
    /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
    /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
    /* 0xba */ IEMOP_X4(iemOp_Grp8),
    /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
    /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
    /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
    /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),

    /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
    /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
    /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
    /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
    /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
    /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
    /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
    /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
    /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
    /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
    /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),

    /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
    /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
    /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
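/* Note: 1024 entries = 256 opcode bytes times the four prefix variants
   (none, 0x66, 0xf3 and 0xf2) laid out per row above. */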

/** @} */