/* $Id: IEMAllInstTwoByte0f.cpp.h 101958 2023-11-08 10:54:58Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */


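/*
 * Editorial note (not part of the original file): the decode workers below all
 * share one shape -- IEM_MC_BEGIN/IEM_MC_END bracket a "microcode" block, and
 * the register form and memory form of each instruction are emitted as two
 * separate blocks.  In this revision the IEM_MC_BEGIN arguments are assumed to
 * be (cArgs, cLocals, fMcFlags, fCImplFlags); see IEMMc.h for the
 * authoritative definition.
 */

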
/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 */
FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
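

/*
 * Illustrative sketch (editorial, not part of the original file): a typical
 * opcode handler dispatches to the worker above with an assembly helper.
 * The mnemonic macro arguments and the iemAImpl_pxor_u64 helper name are
 * assumptions used for illustration only:
 *
 *     FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
 *     {
 *         IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS, 0);
 *         return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pxor_u64);
 *     }
 */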


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 *
 * Unlike iemOpCommonMmx_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
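

/*
 * Editorial sketch of the two helper-pointer flavours distinguished above
 * (assumed shapes; see the PFNIEMAIMPLMEDIAF2U64 / PFNIEMAIMPLMEDIAOPTF2U64
 * typedefs in the IEM headers for the authoritative declarations):
 *
 *     // "full" flavour - gets the FXSAVE state, called via IEM_MC_CALL_MMX_AIMPL_2:
 *     void iemAImpl_xxx_u64(PCX86FXSTATE pFpuState, uint64_t *puDst, uint64_t const *puSrc);
 *
 *     // "opt" flavour - operands only, called via IEM_MC_CALL_VOID_AIMPL_2:
 *     void iemAImpl_xxx_u64(uint64_t *puDst, uint64_t const *puSrc);
 */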


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 *
 * Unlike iemOpCommonMmxSse_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE2.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full_Sse2, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * SSE cpuid checks. No SIMD FP exceptions.
 *
 * @sa iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
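

/*
 * Illustrative sketch (editorial): SSE opcode handlers feed the workers in
 * this file a 128-bit assembly helper the same way the MMX workers are fed
 * 64-bit ones.  Handler and helper names here are hypothetical placeholders,
 * matching the "pxxx" convention of the doc comments:
 *
 *     FNIEMOP_DEF(iemOp_pxxx_Vx_Wx)
 *     {
 *         IEMOP_MNEMONIC2(RM, PXXX, pxxx, Vx, Wx, DISOPTYPE_HARMLESS, 0);
 *         return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pxxx_u128);
 *     }
 */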


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * Unlike iemOpCommonSse2_FullFull_To_Full, the @a pfnU128 worker function takes
 * no FXSAVE state, just the operands.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxxx   mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem32].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U32_ZX_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
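

/*
 * Illustrative sketch (editorial): the low-half workers suit the unpack-low
 * family, where the memory form only reads half a register.  Assuming a
 * helper named iemAImpl_punpcklbw_u64 exists, a handler would look roughly
 * like this:
 *
 *     FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
 *     {
 *         IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS, 0);
 *         return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
 *     }
 */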


/**
 * Common worker for SSE instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned access of which either 64 or the full 128 bits may
 * be read for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned access of which either 64 or the full 128 bits may
 * be read for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxxx   mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Intel documents this as a full 64-bit read. */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
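

/*
 * Illustrative sketch (editorial): the high-half workers suit the unpack-high
 * family.  Handler and helper names here are assumptions for illustration:
 *
 *     FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq)
 *     {
 *         IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Pq, Qq, DISOPTYPE_HARMLESS, 0);
 *         return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
 *     }
 */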


/**
 * Common worker for SSE instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE is a 128-bit
 * aligned access where it may read the full 128 bits or only the upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions of the form:
 *      pxxs    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
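

/*
 * Illustrative sketch (editorial): the FP workers route the MXCSR outcome
 * through the IEMSSERESULT local seen above.  Assuming a helper named
 * iemAImpl_addps_u128, an ADDPS handler would dispatch roughly like this:
 *
 *     FNIEMOP_DEF(iemOp_addps_Vps_Wps)
 *     {
 *         IEMOP_MNEMONIC2(RM, ADDPS, addps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
 *         return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
 *     }
 */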


/**
 * Common worker for SSE instructions of the form:
 *      pxxs    xmm1, xmm2/mem32
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullR32_To_Full, PFNIEMAIMPLFPSSEF2U128R32, pfnU128_R32)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM32.
         */
        IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT32U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R32_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem32].
         */
        IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(RTFLOAT32U, r32Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src2, r32Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R32(r32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pr32Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxd    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSseFp_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxs    xmm1, xmm2/mem64
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullR64_To_Full, PFNIEMAIMPLFPSSEF2U128R64, pfnU128_R64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT64U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R64_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem64].
         */
        IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(RTFLOAT64U, r64Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src2, r64Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R64(r64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pr64Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
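

/*
 * Illustrative sketch (editorial): the R64 worker fits scalar double-precision
 * instructions, where the memory form reads only 64 bits.  The names below
 * are assumptions used for illustration:
 *
 *     FNIEMOP_DEF(iemOp_addsd_Vsd_Wsd)
 *     {
 *         IEMOP_MNEMONIC2(RM, ADDSD, addsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
 *         return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_addsd_u128_r64);
 *     }
 */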


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE is a 128-bit
 * aligned access where it may read the full 128 bits or only the upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE3 instructions of the form:
 *      hxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE3 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse3Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_sldt_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
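

/*
 * Editorial note: the register path above defers the entire instruction to a
 * C implementation (IEM_MC_DEFER_TO_CIMPL_2_RET), while the memory path must
 * first compute the effective address inside an MC block and only then hand
 * off to iemCImpl_sldt_mem.  The same split recurs in most of the Grp6/Grp7
 * decoders below.
 */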


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_str_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_286, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1, IEM_MC_F_MIN_286, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_286, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1, IEM_MC_F_MIN_286, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
}


/** Common worker for opcodes 0x0f 0x00 /4 (verr) and /5 (verw). */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_286, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1, IEM_MC_F_MIN_286, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[IEM_GET_MODRM_REG_8(bRm)], bRm);
}
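

/*
 * Editorial reminder: a ModRM byte is laid out as mod[7:6] reg[5:3] rm[2:0];
 * the group jump tables in this file are indexed by the 3-bit reg field
 * (IEM_GET_MODRM_REG_8), which encodes the sub-opcode for group opcodes.
 */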


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1, IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of instruction used, and if a
       hypercall isn't handled by GIM or HMSvm, an #UD will be raised.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, iemCImpl_vmcall);
}


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
    IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
                                iemCImpl_vmlaunch);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_MNEMONIC(vmresume, "vmresume");
    IEMOP_HLP_IN_VMX_OPERATION("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
                                iemCImpl_vmresume);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_MNEMONIC(vmxoff, "vmxoff");
    IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_vmxoff);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1, IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT, iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
/** @todo testcase: test prefixes and exceptions. currently not checking for the
 *        OPSIZE one ... */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_xgetbv);
    }
    IEMOP_RAISE_INVALID_OPCODE_RET();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
/** @todo testcase: test prefixes and exceptions. currently not checking for the
 *        OPSIZE one ... */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_xsetbv);
    }
    IEMOP_RAISE_INVALID_OPCODE_RET();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg, /*=*/ enmEffOpSize, 2);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT, iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 0xd8. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
                                iemCImpl_vmrun);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
#endif

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    /** @todo r=bird: Table A-8 on page 524 in vol 3 has VMGEXIT for this
     *        opcode sequence when F3 or F2 is used as prefix. So, the assumption
     *        here cannot be right... */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
       want all hypercalls regardless of instruction used, and if a
       hypercall isn't handled by GIM or HMSvm, an #UD will be raised.
       (NEM/win makes ASSUMPTIONS about this behavior.) */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_vmmcall);
}

/** Opcode 0x0f 0x01 0xda. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_vmload);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
#endif


/** Opcode 0x0f 0x01 0xdb. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_vmsave);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
#endif


/** Opcode 0x0f 0x01 0xdc. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_stgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
#endif


/** Opcode 0x0f 0x01 0xdd. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_clgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
#endif


/** Opcode 0x0f 0x01 0xdf. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_invlpga);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif


/** Opcode 0x0f 0x01 0xde. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_skinit);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif
1672
1673
1674/** Opcode 0x0f 0x01 /4. */
1675FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
1676{
1677 IEMOP_MNEMONIC(smsw, "smsw");
1678 IEMOP_HLP_MIN_286();
1679 if (IEM_IS_MODRM_REG_MODE(bRm))
1680 {
1681 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1682 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_smsw_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
1683 }
1684
1685 /* Ignore operand size here, memory refs are always 16-bit. */
1686 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_286, 0);
1687 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
1688 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1689 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1690 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
1691 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
1692 IEM_MC_END();
1693}
1694
1695
1696/** Opcode 0x0f 0x01 /6. */
1697FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
1698{
1699 /* The operand size is effectively ignored, all is 16-bit and only the
1700 lower 4 bits are used (PE can be set but not cleared). */
1701 IEMOP_MNEMONIC(lmsw, "lmsw");
1702 IEMOP_HLP_MIN_286();
1703 if (IEM_IS_MODRM_REG_MODE(bRm))
1704 {
1705 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_286, 0);
1706 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1707 IEM_MC_ARG(uint16_t, u16Tmp, 0);
1708 IEM_MC_ARG_CONST(RTGCPTR, GCPtrEffDst, NIL_RTGCPTR, 1);
1709 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
1710 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
1711 IEM_MC_END();
1712 }
1713 else
1714 {
1715 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_286, 0);
1716 IEM_MC_ARG(uint16_t, u16Tmp, 0);
1717 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
1718 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1719 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1720 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
1721 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
1722 IEM_MC_END();
1723 }
1724}
1725
1726
1727/** Opcode 0x0f 0x01 /7. */
1728FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
1729{
1730 IEMOP_MNEMONIC(invlpg, "invlpg");
1731 IEMOP_HLP_MIN_486();
1732 IEM_MC_BEGIN(1, 1, IEM_MC_F_MIN_386, 0);
1733 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
1734 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1735 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1736 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, iemCImpl_invlpg, GCPtrEffDst);
1737 IEM_MC_END();
1738}
1739
1740
1741/** Opcode 0x0f 0x01 0xf8. */
1742FNIEMOP_DEF(iemOp_Grp7_swapgs)
1743{
1744 IEMOP_MNEMONIC(swapgs, "swapgs");
1745 IEMOP_HLP_ONLY_64BIT();
1746 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1747 IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_swapgs);
1748}
1749
1750
1751/** Opcode 0x0f 0x01 0xf9. */
1752FNIEMOP_DEF(iemOp_Grp7_rdtscp)
1753{
1754 IEMOP_MNEMONIC(rdtscp, "rdtscp");
1755 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1756 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_rdtscp);
1757}
1758
1759
1760/**
1761 * Group 7 jump table, memory variant.
1762 */
1763IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
1764{
1765 iemOp_Grp7_sgdt,
1766 iemOp_Grp7_sidt,
1767 iemOp_Grp7_lgdt,
1768 iemOp_Grp7_lidt,
1769 iemOp_Grp7_smsw,
1770 iemOp_InvalidWithRM,
1771 iemOp_Grp7_lmsw,
1772 iemOp_Grp7_invlpg
1773};
1774
1775
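/* Decoding note: group 7 selects the sub-instruction via the ModR/M reg
   field. Memory forms dispatch through g_apfnGroup7Mem above, while the
   register forms encode a second level in the r/m field, e.g.:
       0f 01 c1 -> mod=3 reg=0 rm=1 -> vmcall
       0f 01 d8 -> mod=3 reg=3 rm=0 -> vmrun */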
1776/** Opcode 0x0f 0x01. */
1777FNIEMOP_DEF(iemOp_Grp7)
1778{
1779 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1780 if (IEM_IS_MODRM_MEM_MODE(bRm))
1781 return FNIEMOP_CALL_1(g_apfnGroup7Mem[IEM_GET_MODRM_REG_8(bRm)], bRm);
1782
1783 switch (IEM_GET_MODRM_REG_8(bRm))
1784 {
1785 case 0:
1786 switch (IEM_GET_MODRM_RM_8(bRm))
1787 {
1788 case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
1789 case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
1790 case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
1791 case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
1792 }
1793 IEMOP_RAISE_INVALID_OPCODE_RET();
1794
1795 case 1:
1796 switch (IEM_GET_MODRM_RM_8(bRm))
1797 {
1798 case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
1799 case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
1800 }
1801 IEMOP_RAISE_INVALID_OPCODE_RET();
1802
1803 case 2:
1804 switch (IEM_GET_MODRM_RM_8(bRm))
1805 {
1806 case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
1807 case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
1808 }
1809 IEMOP_RAISE_INVALID_OPCODE_RET();
1810
1811 case 3:
1812 switch (IEM_GET_MODRM_RM_8(bRm))
1813 {
1814 case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
1815 case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
1816 case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
1817 case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
1818 case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
1819 case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
1820 case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
1821 case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
1822 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1823 }
1824
1825 case 4:
1826 return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);
1827
1828 case 5:
1829 IEMOP_RAISE_INVALID_OPCODE_RET();
1830
1831 case 6:
1832 return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);
1833
1834 case 7:
1835 switch (IEM_GET_MODRM_RM_8(bRm))
1836 {
1837 case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
1838 case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
1839 }
1840 IEMOP_RAISE_INVALID_OPCODE_RET();
1841
1842 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1843 }
1844}
1845
1846/** Common worker for lar (0x0f 0x02) and lsl (0x0f 0x03), Gv,Ew. */
1847FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
1848{
1849 IEMOP_HLP_NO_REAL_OR_V86_MODE();
1850 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1851
1852 if (IEM_IS_MODRM_REG_MODE(bRm))
1853 {
1854 switch (pVCpu->iem.s.enmEffOpSize)
1855 {
1856 case IEMMODE_16BIT:
1857 IEM_MC_BEGIN(3, 0, 0, 0);
1858 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1859 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1860 IEM_MC_ARG(uint16_t, u16Sel, 1);
1861 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1862
1863 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1864 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1865 IEM_MC_HINT_FLUSH_GUEST_SHADOW_GREG(IEM_GET_MODRM_REG(pVCpu, bRm));
1866 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1867
1868 IEM_MC_END();
1869 break;
1870
1871 case IEMMODE_32BIT:
1872 case IEMMODE_64BIT:
1873 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
1874 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1875 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1876 IEM_MC_ARG(uint16_t, u16Sel, 1);
1877 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1878
1879 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1880 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1881 IEM_MC_HINT_FLUSH_GUEST_SHADOW_GREG(IEM_GET_MODRM_REG(pVCpu, bRm));
1882 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1883
1884 IEM_MC_END();
1885 break;
1886
1887 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1888 }
1889 }
1890 else
1891 {
1892 switch (pVCpu->iem.s.enmEffOpSize)
1893 {
1894 case IEMMODE_16BIT:
1895 IEM_MC_BEGIN(3, 1, 0, 0);
1896 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1897 IEM_MC_ARG(uint16_t, u16Sel, 1);
1898 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1899 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1900
1901 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1902 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1903
1904 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1905 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1906 IEM_MC_HINT_FLUSH_GUEST_SHADOW_GREG(IEM_GET_MODRM_REG(pVCpu, bRm));
1907 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1908
1909 IEM_MC_END();
1910 break;
1911
1912 case IEMMODE_32BIT:
1913 case IEMMODE_64BIT:
1914 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
1915 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1916 IEM_MC_ARG(uint16_t, u16Sel, 1);
1917 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1918 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1919
1920 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1921 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1922/** @todo testcase: make sure it's a 16-bit read. */
1923
1924 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1925 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1926 IEM_MC_HINT_FLUSH_GUEST_SHADOW_GREG(IEM_GET_MODRM_REG(pVCpu, bRm));
1927 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1928
1929 IEM_MC_END();
1930 break;
1931
1932 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1933 }
1934 }
1935}
1936
1937
1938
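/* Note: lar and lsl share iemOpCommonLarLsl_Gv_Ew above. Both probe the
   descriptor referenced by the selector in Ew and set ZF on success, lar
   returning the (masked) access rights and lsl the segment limit; the
   actual permission checks live in iemCImpl_LarLsl_u16/u64. */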
1939/** Opcode 0x0f 0x02. */
1940FNIEMOP_DEF(iemOp_lar_Gv_Ew)
1941{
1942 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
1943 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
1944}
1945
1946
1947/** Opcode 0x0f 0x03. */
1948FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
1949{
1950 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
1951 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
1952}
1953
1954
1955/** Opcode 0x0f 0x05. */
1956FNIEMOP_DEF(iemOp_syscall)
1957{
1958 IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
1959 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1960 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
1961 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB,
1962 iemCImpl_syscall);
1963}
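/* Note: syscall (and sysret below) is a far indirect branch that rewrites
   CS/SS and RFLAGS, hence the full IEM_CIMPL_F_BRANCH_INDIRECT, _BRANCH_FAR,
   _MODE, _RFLAGS and _END_TB set, telling the recompiler to end the
   translation block because the execution mode may have changed. */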
1964
1965
1966/** Opcode 0x0f 0x06. */
1967FNIEMOP_DEF(iemOp_clts)
1968{
1969 IEMOP_MNEMONIC(clts, "clts");
1970 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1971 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_clts);
1972}
1973
1974
1975/** Opcode 0x0f 0x07. */
1976FNIEMOP_DEF(iemOp_sysret)
1977{
1978 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
1979 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1980 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
1981 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB,
1982 iemCImpl_sysret, pVCpu->iem.s.enmEffOpSize);
1983}
1984
1985
1986/** Opcode 0x0f 0x08. */
1987FNIEMOP_DEF(iemOp_invd)
1988{
1989 IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
1990 IEMOP_HLP_MIN_486();
1991 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1992 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_invd);
1993}
1994
1995
1996/** Opcode 0x0f 0x09. */
1997FNIEMOP_DEF(iemOp_wbinvd)
1998{
1999 IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
2000 IEMOP_HLP_MIN_486();
2001 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2002 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_wbinvd);
2003}
2004
2005
2006/** Opcode 0x0f 0x0b. */
2007FNIEMOP_DEF(iemOp_ud2)
2008{
2009 IEMOP_MNEMONIC(ud2, "ud2");
2010 IEMOP_RAISE_INVALID_OPCODE_RET();
2011}
2012
2013/** Opcode 0x0f 0x0d. */
2014FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
2015{
2016 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
2017 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLongMode && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
2018 {
2019 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
2020 IEMOP_RAISE_INVALID_OPCODE_RET();
2021 }
2022
2023 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2024 if (IEM_IS_MODRM_REG_MODE(bRm))
2025 {
2026 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
2027 IEMOP_RAISE_INVALID_OPCODE_RET();
2028 }
2029
2030 switch (IEM_GET_MODRM_REG_8(bRm))
2031 {
2032 case 2: /* Aliased to /0 for the time being. */
2033 case 4: /* Aliased to /0 for the time being. */
2034 case 5: /* Aliased to /0 for the time being. */
2035 case 6: /* Aliased to /0 for the time being. */
2036 case 7: /* Aliased to /0 for the time being. */
2037 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
2038 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
2039 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
2040 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2041 }
2042
2043 IEM_MC_BEGIN(0, 1, 0, 0);
2044 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2045 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2046 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2047 /* Currently a NOP. */
2048 NOREF(GCPtrEffSrc);
2049 IEM_MC_ADVANCE_RIP_AND_FINISH();
2050 IEM_MC_END();
2051}
2052
2053
2054/** Opcode 0x0f 0x0e. */
2055FNIEMOP_DEF(iemOp_femms)
2056{
2057 IEMOP_MNEMONIC(femms, "femms");
2058
2059 IEM_MC_BEGIN(0, 0, 0, 0);
2060 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2061 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
2062 IEM_MC_MAYBE_RAISE_FPU_XCPT();
2063 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2064 IEM_MC_FPU_FROM_MMX_MODE();
2065 IEM_MC_ADVANCE_RIP_AND_FINISH();
2066 IEM_MC_END();
2067}
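/* Note: femms is AMD's lighter-weight emms; the MMX register contents become
   undefined and only the x87 tag word has to read as empty afterwards, so
   leaving MMX mode via IEM_MC_FPU_FROM_MMX_MODE() is all that is needed. */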
2068
2069
2070/** Opcode 0x0f 0x0f. */
2071FNIEMOP_DEF(iemOp_3Dnow)
2072{
2073 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
2074 {
2075 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
2076 IEMOP_RAISE_INVALID_OPCODE_RET();
2077 }
2078
2079#ifdef IEM_WITH_3DNOW
2080 /* This is pretty sparse, use switch instead of table. */
2081 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2082 return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
2083#else
2084 IEMOP_BITCH_ABOUT_STUB();
2085 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2086#endif
2087}
2088
2089
2090/**
2091 * @opcode 0x10
2092 * @oppfx none
2093 * @opcpuid sse
2094 * @opgroup og_sse_simdfp_datamove
2095 * @opxcpttype 4UA
2096 * @optest op1=1 op2=2 -> op1=2
2097 * @optest op1=0 op2=-22 -> op1=-22
2098 */
2099FNIEMOP_DEF(iemOp_movups_Vps_Wps)
2100{
2101 IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2102 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2103 if (IEM_IS_MODRM_REG_MODE(bRm))
2104 {
2105 /*
2106 * XMM128, XMM128.
2107 */
2108 IEM_MC_BEGIN(0, 0, 0, 0);
2109 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2110 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2111 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2112 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2113 IEM_GET_MODRM_RM(pVCpu, bRm));
2114 IEM_MC_ADVANCE_RIP_AND_FINISH();
2115 IEM_MC_END();
2116 }
2117 else
2118 {
2119 /*
2120 * XMM128, [mem128].
2121 */
2122 IEM_MC_BEGIN(0, 2, 0, 0);
2123 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2124 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2125
2126 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2127 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2128 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2129 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2130
2131 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2132 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2133
2134 IEM_MC_ADVANCE_RIP_AND_FINISH();
2135 IEM_MC_END();
2136 }
2138}
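/* Note: unlike movaps (0x0f 0x28/0x29 below), movups uses the unaligned
   IEM_MC_FETCH_MEM_U128 path, so a misaligned effective address does not
   raise #GP(0). */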
2139
2140
2141/**
2142 * @opcode 0x10
2143 * @oppfx 0x66
2144 * @opcpuid sse2
2145 * @opgroup og_sse2_pcksclr_datamove
2146 * @opxcpttype 4UA
2147 * @optest op1=1 op2=2 -> op1=2
2148 * @optest op1=0 op2=-42 -> op1=-42
2149 */
2150FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
2151{
2152 IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2153 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2154 if (IEM_IS_MODRM_REG_MODE(bRm))
2155 {
2156 /*
2157 * XMM128, XMM128.
2158 */
2159 IEM_MC_BEGIN(0, 0, 0, 0);
2160 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2161 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2162 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2163 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2164 IEM_GET_MODRM_RM(pVCpu, bRm));
2165 IEM_MC_ADVANCE_RIP_AND_FINISH();
2166 IEM_MC_END();
2167 }
2168 else
2169 {
2170 /*
2171 * XMM128, [mem128].
2172 */
2173 IEM_MC_BEGIN(0, 2, 0, 0);
2174 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2175 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2176
2177 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2178 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2179 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2180 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2181
2182 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2183 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2184
2185 IEM_MC_ADVANCE_RIP_AND_FINISH();
2186 IEM_MC_END();
2187 }
2188}
2189
2190
2191/**
2192 * @opcode 0x10
2193 * @oppfx 0xf3
2194 * @opcpuid sse
2195 * @opgroup og_sse_simdfp_datamove
2196 * @opxcpttype 5
2197 * @optest op1=1 op2=2 -> op1=2
2198 * @optest op1=0 op2=-22 -> op1=-22
2199 */
2200FNIEMOP_DEF(iemOp_movss_Vss_Wss)
2201{
2202 IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2203 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2204 if (IEM_IS_MODRM_REG_MODE(bRm))
2205 {
2206 /*
2207 * XMM32, XMM32.
2208 */
2209 IEM_MC_BEGIN(0, 1, 0, 0);
2210 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2211 IEM_MC_LOCAL(uint32_t, uSrc);
2212
2213 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2214 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2215 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/ );
2216 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, uSrc);
2217
2218 IEM_MC_ADVANCE_RIP_AND_FINISH();
2219 IEM_MC_END();
2220 }
2221 else
2222 {
2223 /*
2224 * XMM128, [mem32].
2225 */
2226 IEM_MC_BEGIN(0, 2, 0, 0);
2227 IEM_MC_LOCAL(uint32_t, uSrc);
2228 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2229
2230 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2231 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2232 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2233 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2234
2235 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2236 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2237
2238 IEM_MC_ADVANCE_RIP_AND_FINISH();
2239 IEM_MC_END();
2240 }
2241}
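/* Note the subtle movss semantics implemented above: the register form only
   replaces dword 0 of the destination, while the memory load form
   zero-extends the 32 bits to the full 128-bit register
   (IEM_MC_STORE_XREG_U32_ZX_U128). movsd (0xf2 prefix, below) behaves the
   same way at qword granularity. */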
2242
2243
2244/**
2245 * @opcode 0x10
2246 * @oppfx 0xf2
2247 * @opcpuid sse2
2248 * @opgroup og_sse2_pcksclr_datamove
2249 * @opxcpttype 5
2250 * @optest op1=1 op2=2 -> op1=2
2251 * @optest op1=0 op2=-42 -> op1=-42
2252 */
2253FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
2254{
2255 IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2256 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2257 if (IEM_IS_MODRM_REG_MODE(bRm))
2258 {
2259 /*
2260 * XMM64, XMM64.
2261 */
2262 IEM_MC_BEGIN(0, 1, 0, 0);
2263 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2264 IEM_MC_LOCAL(uint64_t, uSrc);
2265
2266 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2267 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2268 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2269 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2270
2271 IEM_MC_ADVANCE_RIP_AND_FINISH();
2272 IEM_MC_END();
2273 }
2274 else
2275 {
2276 /*
2277 * XMM128, [mem64].
2278 */
2279 IEM_MC_BEGIN(0, 2, 0, 0);
2280 IEM_MC_LOCAL(uint64_t, uSrc);
2281 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2282
2283 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2284 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2285 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2286 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2287
2288 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2289 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2290
2291 IEM_MC_ADVANCE_RIP_AND_FINISH();
2292 IEM_MC_END();
2293 }
2294}
2295
2296
2297/**
2298 * @opcode 0x11
2299 * @oppfx none
2300 * @opcpuid sse
2301 * @opgroup og_sse_simdfp_datamove
2302 * @opxcpttype 4UA
2303 * @optest op1=1 op2=2 -> op1=2
2304 * @optest op1=0 op2=-42 -> op1=-42
2305 */
2306FNIEMOP_DEF(iemOp_movups_Wps_Vps)
2307{
2308 IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2309 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2310 if (IEM_IS_MODRM_REG_MODE(bRm))
2311 {
2312 /*
2313 * XMM128, XMM128.
2314 */
2315 IEM_MC_BEGIN(0, 0, 0, 0);
2316 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2317 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2318 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2319 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2320 IEM_GET_MODRM_REG(pVCpu, bRm));
2321 IEM_MC_ADVANCE_RIP_AND_FINISH();
2322 IEM_MC_END();
2323 }
2324 else
2325 {
2326 /*
2327 * [mem128], XMM128.
2328 */
2329 IEM_MC_BEGIN(0, 2, 0, 0);
2330 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2331 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2332
2333 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2334 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2335 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2336 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2337
2338 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2339 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2340
2341 IEM_MC_ADVANCE_RIP_AND_FINISH();
2342 IEM_MC_END();
2343 }
2344}
2345
2346
2347/**
2348 * @opcode 0x11
2349 * @oppfx 0x66
2350 * @opcpuid sse2
2351 * @opgroup og_sse2_pcksclr_datamove
2352 * @opxcpttype 4UA
2353 * @optest op1=1 op2=2 -> op1=2
2354 * @optest op1=0 op2=-42 -> op1=-42
2355 */
2356FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
2357{
2358 IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2359 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2360 if (IEM_IS_MODRM_REG_MODE(bRm))
2361 {
2362 /*
2363 * XMM128, XMM128.
2364 */
2365 IEM_MC_BEGIN(0, 0, 0, 0);
2366 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2367 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2368 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2369 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2370 IEM_GET_MODRM_REG(pVCpu, bRm));
2371 IEM_MC_ADVANCE_RIP_AND_FINISH();
2372 IEM_MC_END();
2373 }
2374 else
2375 {
2376 /*
2377 * [mem128], XMM128.
2378 */
2379 IEM_MC_BEGIN(0, 2, 0, 0);
2380 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2381 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2382
2383 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2384 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2385 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2386 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2387
2388 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2389 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2390
2391 IEM_MC_ADVANCE_RIP_AND_FINISH();
2392 IEM_MC_END();
2393 }
2394}
2395
2396
2397/**
2398 * @opcode 0x11
2399 * @oppfx 0xf3
2400 * @opcpuid sse
2401 * @opgroup og_sse_simdfp_datamove
2402 * @opxcpttype 5
2403 * @optest op1=1 op2=2 -> op1=2
2404 * @optest op1=0 op2=-22 -> op1=-22
2405 */
2406FNIEMOP_DEF(iemOp_movss_Wss_Vss)
2407{
2408 IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2409 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2410 if (IEM_IS_MODRM_REG_MODE(bRm))
2411 {
2412 /*
2413 * XMM32, XMM32.
2414 */
2415 IEM_MC_BEGIN(0, 1, 0, 0);
2416 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2417 IEM_MC_LOCAL(uint32_t, uSrc);
2418
2419 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2420 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2421 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
2422 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/, uSrc);
2423
2424 IEM_MC_ADVANCE_RIP_AND_FINISH();
2425 IEM_MC_END();
2426 }
2427 else
2428 {
2429 /*
2430 * [mem32], XMM32.
2431 */
2432 IEM_MC_BEGIN(0, 2, 0, 0);
2433 IEM_MC_LOCAL(uint32_t, uSrc);
2434 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2435
2436 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2437 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2438 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2439 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2440
2441 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
2442 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2443
2444 IEM_MC_ADVANCE_RIP_AND_FINISH();
2445 IEM_MC_END();
2446 }
2447}
2448
2449
2450/**
2451 * @opcode 0x11
2452 * @oppfx 0xf2
2453 * @opcpuid sse2
2454 * @opgroup og_sse2_pcksclr_datamove
2455 * @opxcpttype 5
2456 * @optest op1=1 op2=2 -> op1=2
2457 * @optest op1=0 op2=-42 -> op1=-42
2458 */
2459FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
2460{
2461 IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2462 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2463 if (IEM_IS_MODRM_REG_MODE(bRm))
2464 {
2465 /*
2466 * XMM64, XMM64.
2467 */
2468 IEM_MC_BEGIN(0, 1, 0, 0);
2469 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2470 IEM_MC_LOCAL(uint64_t, uSrc);
2471
2472 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2473 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2474 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2475 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2476
2477 IEM_MC_ADVANCE_RIP_AND_FINISH();
2478 IEM_MC_END();
2479 }
2480 else
2481 {
2482 /*
2483 * [mem64], XMM64.
2484 */
2485 IEM_MC_BEGIN(0, 2, 0, 0);
2486 IEM_MC_LOCAL(uint64_t, uSrc);
2487 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2488
2489 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2490 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2491 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2492 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2493
2494 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2495 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2496
2497 IEM_MC_ADVANCE_RIP_AND_FINISH();
2498 IEM_MC_END();
2499 }
2500}
2501
2502
2503FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
2504{
2505 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2506 if (IEM_IS_MODRM_REG_MODE(bRm))
2507 {
2508 /**
2509 * @opcode 0x12
2510 * @opcodesub 11 mr/reg
2511 * @oppfx none
2512 * @opcpuid sse
2513 * @opgroup og_sse_simdfp_datamove
2514 * @opxcpttype 5
2515 * @optest op1=1 op2=2 -> op1=2
2516 * @optest op1=0 op2=-42 -> op1=-42
2517 */
2518 IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2519
2520 IEM_MC_BEGIN(0, 1, 0, 0);
2521 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2522 IEM_MC_LOCAL(uint64_t, uSrc);
2523
2524 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2525 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2526 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 1 /* a_iQword*/);
2527 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2528
2529 IEM_MC_ADVANCE_RIP_AND_FINISH();
2530 IEM_MC_END();
2531 }
2532 else
2533 {
2534 /**
2535 * @opdone
2536 * @opcode 0x12
2537 * @opcodesub !11 mr/reg
2538 * @oppfx none
2539 * @opcpuid sse
2540 * @opgroup og_sse_simdfp_datamove
2541 * @opxcpttype 5
2542 * @optest op1=1 op2=2 -> op1=2
2543 * @optest op1=0 op2=-42 -> op1=-42
2544 * @opfunction iemOp_movlps_Vq_Mq__movhlps
2545 */
2546 IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2547
2548 IEM_MC_BEGIN(0, 2, 0, 0);
2549 IEM_MC_LOCAL(uint64_t, uSrc);
2550 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2551
2552 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2553 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2554 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2555 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2556
2557 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2558 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2559
2560 IEM_MC_ADVANCE_RIP_AND_FINISH();
2561 IEM_MC_END();
2562 }
2563}
2564
2565
2566/**
2567 * @opcode 0x12
2568 * @opcodesub !11 mr/reg
2569 * @oppfx 0x66
2570 * @opcpuid sse2
2571 * @opgroup og_sse2_pcksclr_datamove
2572 * @opxcpttype 5
2573 * @optest op1=1 op2=2 -> op1=2
2574 * @optest op1=0 op2=-42 -> op1=-42
2575 */
2576FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
2577{
2578 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2579 if (IEM_IS_MODRM_MEM_MODE(bRm))
2580 {
2581 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2582
2583 IEM_MC_BEGIN(0, 2, 0, 0);
2584 IEM_MC_LOCAL(uint64_t, uSrc);
2585 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2586
2587 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2588 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2589 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2590 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2591
2592 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2593 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2594
2595 IEM_MC_ADVANCE_RIP_AND_FINISH();
2596 IEM_MC_END();
2597 }
2598
2599 /**
2600 * @opdone
2601 * @opmnemonic ud660f12m3
2602 * @opcode 0x12
2603 * @opcodesub 11 mr/reg
2604 * @oppfx 0x66
2605 * @opunused immediate
2606 * @opcpuid sse
2607 * @optest ->
2608 */
2609 else
2610 IEMOP_RAISE_INVALID_OPCODE_RET();
2611}
2612
2613
2614/**
2615 * @opcode 0x12
2616 * @oppfx 0xf3
2617 * @opcpuid sse3
2618 * @opgroup og_sse3_pcksclr_datamove
2619 * @opxcpttype 4
2620 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
2621 * op1=0x00000002000000020000000100000001
2622 */
2623FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
2624{
2625 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2626 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2627 if (IEM_IS_MODRM_REG_MODE(bRm))
2628 {
2629 /*
2630 * XMM, XMM.
2631 */
2632 IEM_MC_BEGIN(0, 1, 0, 0);
2633 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2634 IEM_MC_LOCAL(RTUINT128U, uSrc);
2635
2636 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2637 IEM_MC_PREPARE_SSE_USAGE();
2638
2639 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2640 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
2641 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
2642 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
2643 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
2644
2645 IEM_MC_ADVANCE_RIP_AND_FINISH();
2646 IEM_MC_END();
2647 }
2648 else
2649 {
2650 /*
2651 * XMM, [mem128].
2652 */
2653 IEM_MC_BEGIN(0, 2, 0, 0);
2654 IEM_MC_LOCAL(RTUINT128U, uSrc);
2655 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2656
2657 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2658 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2659 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2660 IEM_MC_PREPARE_SSE_USAGE();
2661
2662 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2663 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
2664 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
2665 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
2666 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
2667
2668 IEM_MC_ADVANCE_RIP_AND_FINISH();
2669 IEM_MC_END();
2670 }
2671}
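/* The four stores above implement the movsldup lane pattern, duplicating the
   even source dwords: result = { s0, s0, s2, s2 }. */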
2672
2673
2674/**
2675 * @opcode 0x12
2676 * @oppfx 0xf2
2677 * @opcpuid sse3
2678 * @opgroup og_sse3_pcksclr_datamove
2679 * @opxcpttype 5
2680 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
2681 * op1=0x22222222111111112222222211111111
2682 */
2683FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
2684{
2685 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2686 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2687 if (IEM_IS_MODRM_REG_MODE(bRm))
2688 {
2689 /*
2690 * XMM128, XMM64.
2691 */
2692 IEM_MC_BEGIN(0, 1, 0, 0);
2693 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2694 IEM_MC_LOCAL(uint64_t, uSrc);
2695
2696 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2697 IEM_MC_PREPARE_SSE_USAGE();
2698
2699 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2700 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2701 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2702
2703 IEM_MC_ADVANCE_RIP_AND_FINISH();
2704 IEM_MC_END();
2705 }
2706 else
2707 {
2708 /*
2709 * XMM128, [mem64].
2710 */
2711 IEM_MC_BEGIN(0, 2, 0, 0);
2712 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2713 IEM_MC_LOCAL(uint64_t, uSrc);
2714
2715 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2716 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2717 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2718 IEM_MC_PREPARE_SSE_USAGE();
2719
2720 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2721 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2722 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2723
2724 IEM_MC_ADVANCE_RIP_AND_FINISH();
2725 IEM_MC_END();
2726 }
2727}
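/* movddup duplicates the low source qword into both halves of the
   destination, hence the qword 0 store followed by IEM_MC_STORE_XREG_HI_U64
   above. */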
2728
2729
2730/**
2731 * @opcode 0x13
2732 * @opcodesub !11 mr/reg
2733 * @oppfx none
2734 * @opcpuid sse
2735 * @opgroup og_sse_simdfp_datamove
2736 * @opxcpttype 5
2737 * @optest op1=1 op2=2 -> op1=2
2738 * @optest op1=0 op2=-42 -> op1=-42
2739 */
2740FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
2741{
2742 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2743 if (IEM_IS_MODRM_MEM_MODE(bRm))
2744 {
2745 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2746
2747 IEM_MC_BEGIN(0, 2, 0, 0);
2748 IEM_MC_LOCAL(uint64_t, uSrc);
2749 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2750
2751 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2752 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2753 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2754 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2755
2756 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2757 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2758
2759 IEM_MC_ADVANCE_RIP_AND_FINISH();
2760 IEM_MC_END();
2761 }
2762
2763 /**
2764 * @opdone
2765 * @opmnemonic ud0f13m3
2766 * @opcode 0x13
2767 * @opcodesub 11 mr/reg
2768 * @oppfx none
2769 * @opunused immediate
2770 * @opcpuid sse
2771 * @optest ->
2772 */
2773 else
2774 IEMOP_RAISE_INVALID_OPCODE_RET();
2775}
2776
2777
2778/**
2779 * @opcode 0x13
2780 * @opcodesub !11 mr/reg
2781 * @oppfx 0x66
2782 * @opcpuid sse2
2783 * @opgroup og_sse2_pcksclr_datamove
2784 * @opxcpttype 5
2785 * @optest op1=1 op2=2 -> op1=2
2786 * @optest op1=0 op2=-42 -> op1=-42
2787 */
2788FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
2789{
2790 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2791 if (IEM_IS_MODRM_MEM_MODE(bRm))
2792 {
2793 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2794
2795 IEM_MC_BEGIN(0, 2, 0, 0);
2796 IEM_MC_LOCAL(uint64_t, uSrc);
2797 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2798
2799 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2800 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2801 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2802 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2803
2804 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2805 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2806
2807 IEM_MC_ADVANCE_RIP_AND_FINISH();
2808 IEM_MC_END();
2809 }
2810
2811 /**
2812 * @opdone
2813 * @opmnemonic ud660f13m3
2814 * @opcode 0x13
2815 * @opcodesub 11 mr/reg
2816 * @oppfx 0x66
2817 * @opunused immediate
2818 * @opcpuid sse
2819 * @optest ->
2820 */
2821 else
2822 IEMOP_RAISE_INVALID_OPCODE_RET();
2823}
2824
2825
2826/**
2827 * @opmnemonic udf30f13
2828 * @opcode 0x13
2829 * @oppfx 0xf3
2830 * @opunused intel-modrm
2831 * @opcpuid sse
2832 * @optest ->
2833 * @opdone
2834 */
2835
2836/**
2837 * @opmnemonic udf20f13
2838 * @opcode 0x13
2839 * @oppfx 0xf2
2840 * @opunused intel-modrm
2841 * @opcpuid sse
2842 * @optest ->
2843 * @opdone
2844 */
2845
2846/** Opcode 0x0f 0x14 - unpcklps Vx, Wx*/
2847FNIEMOP_DEF(iemOp_unpcklps_Vx_Wx)
2848{
2849 IEMOP_MNEMONIC2(RM, UNPCKLPS, unpcklps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2850 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, iemAImpl_unpcklps_u128);
2851}
2852
2853
2854/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
2855FNIEMOP_DEF(iemOp_unpcklpd_Vx_Wx)
2856{
2857 IEMOP_MNEMONIC2(RM, UNPCKLPD, unpcklpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2858 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_unpcklpd_u128);
2859}
2860
2861
2862/**
2863 * @opdone
2864 * @opmnemonic udf30f14
2865 * @opcode 0x14
2866 * @oppfx 0xf3
2867 * @opunused intel-modrm
2868 * @opcpuid sse
2869 * @optest ->
2870 * @opdone
2871 */
2872
2873/**
2874 * @opmnemonic udf20f14
2875 * @opcode 0x14
2876 * @oppfx 0xf2
2877 * @opunused intel-modrm
2878 * @opcpuid sse
2879 * @optest ->
2880 * @opdone
2881 */
2882
2883/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
2884FNIEMOP_DEF(iemOp_unpckhps_Vx_Wx)
2885{
2886 IEMOP_MNEMONIC2(RM, UNPCKHPS, unpckhps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2887 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, iemAImpl_unpckhps_u128);
2888}
2889
2890
2891/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
2892FNIEMOP_DEF(iemOp_unpckhpd_Vx_Wx)
2893{
2894 IEMOP_MNEMONIC2(RM, UNPCKHPD, unpckhpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2895 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_unpckhpd_u128);
2896}
2897
2898
2899/* Opcode 0xf3 0x0f 0x15 - invalid */
2900/* Opcode 0xf2 0x0f 0x15 - invalid */
2901
2902/**
2903 * @opdone
2904 * @opmnemonic udf30f15
2905 * @opcode 0x15
2906 * @oppfx 0xf3
2907 * @opunused intel-modrm
2908 * @opcpuid sse
2909 * @optest ->
2910 * @opdone
2911 */
2912
2913/**
2914 * @opmnemonic udf20f15
2915 * @opcode 0x15
2916 * @oppfx 0xf2
2917 * @opunused intel-modrm
2918 * @opcpuid sse
2919 * @optest ->
2920 * @opdone
2921 */
2922
2923FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
2924{
2925 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2926 if (IEM_IS_MODRM_REG_MODE(bRm))
2927 {
2928 /**
2929 * @opcode 0x16
2930 * @opcodesub 11 mr/reg
2931 * @oppfx none
2932 * @opcpuid sse
2933 * @opgroup og_sse_simdfp_datamove
2934 * @opxcpttype 5
2935 * @optest op1=1 op2=2 -> op1=2
2936 * @optest op1=0 op2=-42 -> op1=-42
2937 */
2938 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2939
2940 IEM_MC_BEGIN(0, 1, 0, 0);
2941 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2942 IEM_MC_LOCAL(uint64_t, uSrc);
2943
2944 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2945 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2946 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2947 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2948
2949 IEM_MC_ADVANCE_RIP_AND_FINISH();
2950 IEM_MC_END();
2951 }
2952 else
2953 {
2954 /**
2955 * @opdone
2956 * @opcode 0x16
2957 * @opcodesub !11 mr/reg
2958 * @oppfx none
2959 * @opcpuid sse
2960 * @opgroup og_sse_simdfp_datamove
2961 * @opxcpttype 5
2962 * @optest op1=1 op2=2 -> op1=2
2963 * @optest op1=0 op2=-42 -> op1=-42
2964 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
2965 */
2966 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2967
2968 IEM_MC_BEGIN(0, 2, 0, 0);
2969 IEM_MC_LOCAL(uint64_t, uSrc);
2970 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2971
2972 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2973 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2974 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2975 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2976
2977 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2978 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2979
2980 IEM_MC_ADVANCE_RIP_AND_FINISH();
2981 IEM_MC_END();
2982 }
2983}
2984
2985
2986/**
2987 * @opcode 0x16
2988 * @opcodesub !11 mr/reg
2989 * @oppfx 0x66
2990 * @opcpuid sse2
2991 * @opgroup og_sse2_pcksclr_datamove
2992 * @opxcpttype 5
2993 * @optest op1=1 op2=2 -> op1=2
2994 * @optest op1=0 op2=-42 -> op1=-42
2995 */
2996FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
2997{
2998 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2999 if (IEM_IS_MODRM_MEM_MODE(bRm))
3000 {
3001 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3002
3003 IEM_MC_BEGIN(0, 2, 0, 0);
3004 IEM_MC_LOCAL(uint64_t, uSrc);
3005 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3006
3007 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3008 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3009 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3010 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3011
3012 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3013 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3014
3015 IEM_MC_ADVANCE_RIP_AND_FINISH();
3016 IEM_MC_END();
3017 }
3018
3019 /**
3020 * @opdone
3021 * @opmnemonic ud660f16m3
3022 * @opcode 0x16
3023 * @opcodesub 11 mr/reg
3024 * @oppfx 0x66
3025 * @opunused immediate
3026 * @opcpuid sse
3027 * @optest ->
3028 */
3029 else
3030 IEMOP_RAISE_INVALID_OPCODE_RET();
3031}
3032
3033
3034/**
3035 * @opcode 0x16
3036 * @oppfx 0xf3
3037 * @opcpuid sse3
3038 * @opgroup og_sse3_pcksclr_datamove
3039 * @opxcpttype 4
3040 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
3041 * op1=0x00000002000000020000000100000001
3042 */
3043FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
3044{
3045 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3046 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3047 if (IEM_IS_MODRM_REG_MODE(bRm))
3048 {
3049 /*
3050 * XMM128, XMM128.
3051 */
3052 IEM_MC_BEGIN(0, 1, 0, 0);
3053 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
3054 IEM_MC_LOCAL(RTUINT128U, uSrc);
3055
3056 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3057 IEM_MC_PREPARE_SSE_USAGE();
3058
3059 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3060 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
3061 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
3062 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
3063 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
3064
3065 IEM_MC_ADVANCE_RIP_AND_FINISH();
3066 IEM_MC_END();
3067 }
3068 else
3069 {
3070 /*
3071 * XMM128, [mem128].
3072 */
3073 IEM_MC_BEGIN(0, 2, 0, 0);
3074 IEM_MC_LOCAL(RTUINT128U, uSrc);
3075 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3076
3077 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3078 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
3079 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3080 IEM_MC_PREPARE_SSE_USAGE();
3081
3082 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3083 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
3084 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
3085 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
3086 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
3087
3088 IEM_MC_ADVANCE_RIP_AND_FINISH();
3089 IEM_MC_END();
3090 }
3091}
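/* movshdup is the odd-dword counterpart of movsldup (0xf3 0x0f 0x12):
   result = { s1, s1, s3, s3 }. */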
3092
3093/**
3094 * @opdone
3095 * @opmnemonic udf20f16
3096 * @opcode 0x16
3097 * @oppfx 0xf2
3098 * @opunused intel-modrm
3099 * @opcpuid sse
3100 * @optest ->
3101 * @opdone
3102 */
3103
3104
3105/**
3106 * @opcode 0x17
3107 * @opcodesub !11 mr/reg
3108 * @oppfx none
3109 * @opcpuid sse
3110 * @opgroup og_sse_simdfp_datamove
3111 * @opxcpttype 5
3112 * @optest op1=1 op2=2 -> op1=2
3113 * @optest op1=0 op2=-42 -> op1=-42
3114 */
3115FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
3116{
3117 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3118 if (IEM_IS_MODRM_MEM_MODE(bRm))
3119 {
3120 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3121
3122 IEM_MC_BEGIN(0, 2, 0, 0);
3123 IEM_MC_LOCAL(uint64_t, uSrc);
3124 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3125
3126 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3127 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3128 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3129 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3130
3131 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
3132 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3133
3134 IEM_MC_ADVANCE_RIP_AND_FINISH();
3135 IEM_MC_END();
3136 }
3137
3138 /**
3139 * @opdone
3140 * @opmnemonic ud0f17m3
3141 * @opcode 0x17
3142 * @opcodesub 11 mr/reg
3143 * @oppfx none
3144 * @opunused immediate
3145 * @opcpuid sse
3146 * @optest ->
3147 */
3148 else
3149 IEMOP_RAISE_INVALID_OPCODE_RET();
3150}
3151
3152
3153/**
3154 * @opcode 0x17
3155 * @opcodesub !11 mr/reg
3156 * @oppfx 0x66
3157 * @opcpuid sse2
3158 * @opgroup og_sse2_pcksclr_datamove
3159 * @opxcpttype 5
3160 * @optest op1=1 op2=2 -> op1=2
3161 * @optest op1=0 op2=-42 -> op1=-42
3162 */
3163FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
3164{
3165 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3166 if (IEM_IS_MODRM_MEM_MODE(bRm))
3167 {
3168 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3169
3170 IEM_MC_BEGIN(0, 2, 0, 0);
3171 IEM_MC_LOCAL(uint64_t, uSrc);
3172 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3173
3174 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3175 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3176 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3177 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3178
3179 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
3180 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3181
3182 IEM_MC_ADVANCE_RIP_AND_FINISH();
3183 IEM_MC_END();
3184 }
3185
3186 /**
3187 * @opdone
3188 * @opmnemonic ud660f17m3
3189 * @opcode 0x17
3190 * @opcodesub 11 mr/reg
3191 * @oppfx 0x66
3192 * @opunused immediate
3193 * @opcpuid sse
3194 * @optest ->
3195 */
3196 else
3197 IEMOP_RAISE_INVALID_OPCODE_RET();
3198}
3199
3200
3201/**
3202 * @opdone
3203 * @opmnemonic udf30f17
3204 * @opcode 0x17
3205 * @oppfx 0xf3
3206 * @opunused intel-modrm
3207 * @opcpuid sse
3208 * @optest ->
3209 * @opdone
3210 */
3211
3212/**
3213 * @opmnemonic udf20f17
3214 * @opcode 0x17
3215 * @oppfx 0xf2
3216 * @opunused intel-modrm
3217 * @opcpuid sse
3218 * @optest ->
3219 * @opdone
3220 */
3221
3222
3223/** Opcode 0x0f 0x18. */
3224FNIEMOP_DEF(iemOp_prefetch_Grp16)
3225{
3226 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3227 if (IEM_IS_MODRM_MEM_MODE(bRm))
3228 {
3229 switch (IEM_GET_MODRM_REG_8(bRm))
3230 {
3231 case 4: /* Aliased to /0 for the time being according to AMD. */
3232 case 5: /* Aliased to /0 for the time being according to AMD. */
3233 case 6: /* Aliased to /0 for the time being according to AMD. */
3234 case 7: /* Aliased to /0 for the time being according to AMD. */
3235 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
3236 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
3237 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
3238 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
3239 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3240 }
3241
3242 IEM_MC_BEGIN(0, 1, 0, 0);
3243 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3244 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3245 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3246 /* Currently a NOP. */
3247 NOREF(GCPtrEffSrc);
3248 IEM_MC_ADVANCE_RIP_AND_FINISH();
3249 IEM_MC_END();
3250 }
3251 else
3252 IEMOP_RAISE_INVALID_OPCODE_RET();
3253}
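/* Note: implementing the prefetch hints as NOPs is architecturally sound;
   they never fault on the memory operand, so only the effective address
   decoding has to be carried out. */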
3254
3255
3256/** Opcode 0x0f 0x19..0x1f. */
3257FNIEMOP_DEF(iemOp_nop_Ev)
3258{
3259 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
3260 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3261 if (IEM_IS_MODRM_REG_MODE(bRm))
3262 {
3263 IEM_MC_BEGIN(0, 0, 0, 0);
3264 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3265 IEM_MC_ADVANCE_RIP_AND_FINISH();
3266 IEM_MC_END();
3267 }
3268 else
3269 {
3270 IEM_MC_BEGIN(0, 1, 0, 0);
3271 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3272 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3273 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3274 /* Currently a NOP. */
3275 NOREF(GCPtrEffSrc);
3276 IEM_MC_ADVANCE_RIP_AND_FINISH();
3277 IEM_MC_END();
3278 }
3279}
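/* Note: 0x0f 0x19..0x1f is the reserved hint-NOP space; 0f 1f /0 is the
   recommended multi-byte NOP encoding, so the memory form must still decode
   the effective address without performing any access. */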
3280
3281
3282/** Opcode 0x0f 0x20. */
3283FNIEMOP_DEF(iemOp_mov_Rd_Cd)
3284{
3285 /* mod is ignored, as are operand size overrides. */
3286 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
3287 IEMOP_HLP_MIN_386();
3288 if (IEM_IS_64BIT_CODE(pVCpu))
3289 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3290 else
3291 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3292
3293 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3294 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3295 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3296 {
3297 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3298 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3299 IEMOP_RAISE_INVALID_OPCODE_RET(); /* #UD takes precedence over #GP(), see test. */
3300 iCrReg |= 8;
3301 }
3302 switch (iCrReg)
3303 {
3304 case 0: case 2: case 3: case 4: case 8:
3305 break;
3306 default:
3307 IEMOP_RAISE_INVALID_OPCODE_RET();
3308 }
3309 IEMOP_HLP_DONE_DECODING();
3310
3311 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_mov_Rd_Cd, IEM_GET_MODRM_RM(pVCpu, bRm), iCrReg);
3312}
3313
3314
3315/** Opcode 0x0f 0x21. */
3316FNIEMOP_DEF(iemOp_mov_Rd_Dd)
3317{
3318 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
3319 IEMOP_HLP_MIN_386();
3320 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3321 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3322 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3323 IEMOP_RAISE_INVALID_OPCODE_RET();
3324 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_mov_Rd_Dd, IEM_GET_MODRM_RM(pVCpu, bRm), IEM_GET_MODRM_REG_8(bRm));
3325}
3326
3327
3328/** Opcode 0x0f 0x22. */
3329FNIEMOP_DEF(iemOp_mov_Cd_Rd)
3330{
3331 /* mod is ignored, as are operand size overrides. */
3332 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
3333 IEMOP_HLP_MIN_386();
3334 if (IEM_IS_64BIT_CODE(pVCpu))
3335 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3336 else
3337 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3338
3339 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3340 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3341 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3342 {
3343 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3344 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3345 IEMOP_RAISE_INVALID_OPCODE_RET(); /* #UD takes precedence over #GP(), see test. */
3346 iCrReg |= 8;
3347 }
3348 switch (iCrReg)
3349 {
3350 case 0: case 2: case 3: case 4: case 8:
3351 break;
3352 default:
3353 IEMOP_RAISE_INVALID_OPCODE_RET();
3354 }
3355 IEMOP_HLP_DONE_DECODING();
3356
3357 if (iCrReg & (2 | 8))
3358 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT,
3359 iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
3360 else
3361 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT,
3362 iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
3363}
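/* Note on the (2 | 8) test above: of the valid targets {0,2,3,4,8}, CR2, CR3
   and CR8 satisfy it and thus only carry IEM_CIMPL_F_VMEXIT, whereas writes
   to CR0 and CR4 may flip PE/PG and friends and therefore also get
   IEM_CIMPL_F_MODE. */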
3364
3365
3366/** Opcode 0x0f 0x23. */
3367FNIEMOP_DEF(iemOp_mov_Dd_Rd)
3368{
3369 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
3370 IEMOP_HLP_MIN_386();
3371 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3372 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3373 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3374 IEMOP_RAISE_INVALID_OPCODE_RET();
3375 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT,
3376 iemCImpl_mov_Dd_Rd, IEM_GET_MODRM_REG_8(bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
3377}
3378
3379
3380/** Opcode 0x0f 0x24. */
3381FNIEMOP_DEF(iemOp_mov_Rd_Td)
3382{
3383 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
3384 IEMOP_HLP_MIN_386();
3385 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3386 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3387 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3388 IEMOP_RAISE_INVALID_OPCODE_RET();
3389 IEM_MC_DEFER_TO_CIMPL_2_RET(0, iemCImpl_mov_Rd_Td, IEM_GET_MODRM_RM(pVCpu, bRm), IEM_GET_MODRM_REG_8(bRm));
3390}
3391
3392
3393/** Opcode 0x0f 0x26. */
3394FNIEMOP_DEF(iemOp_mov_Td_Rd)
3395{
3396 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
3397 IEMOP_HLP_MIN_386();
3398 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3399 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3400 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3401 IEMOP_RAISE_INVALID_OPCODE_RET();
3402 IEM_MC_DEFER_TO_CIMPL_2_RET(0, iemCImpl_mov_Td_Rd, IEM_GET_MODRM_REG_8(bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
3403}
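/* Note: the test registers only exist on the 386 and 486; Pentium and later
   raise #UD for 0x0f 0x24/0x26, which the target CPU checks above
   reproduce. */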
3404
3405
3406/**
3407 * @opcode 0x28
3408 * @oppfx none
3409 * @opcpuid sse
3410 * @opgroup og_sse_simdfp_datamove
3411 * @opxcpttype 1
3412 * @optest op1=1 op2=2 -> op1=2
3413 * @optest op1=0 op2=-42 -> op1=-42
3414 */
3415FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
3416{
3417 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3418 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3419 if (IEM_IS_MODRM_REG_MODE(bRm))
3420 {
3421 /*
3422 * Register, register.
3423 */
3424 IEM_MC_BEGIN(0, 0, 0, 0);
3425 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3426 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3427 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3428 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3429 IEM_GET_MODRM_RM(pVCpu, bRm));
3430 IEM_MC_ADVANCE_RIP_AND_FINISH();
3431 IEM_MC_END();
3432 }
3433 else
3434 {
3435 /*
3436 * Register, memory.
3437 */
3438 IEM_MC_BEGIN(0, 2, 0, 0);
3439 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3440 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3441
3442 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3443 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3444 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3445 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3446
3447 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3448 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3449
3450 IEM_MC_ADVANCE_RIP_AND_FINISH();
3451 IEM_MC_END();
3452 }
3453}
3454
3455/**
3456 * @opcode 0x28
3457 * @oppfx 66
3458 * @opcpuid sse2
3459 * @opgroup og_sse2_pcksclr_datamove
3460 * @opxcpttype 1
3461 * @optest op1=1 op2=2 -> op1=2
3462 * @optest op1=0 op2=-42 -> op1=-42
3463 */
3464FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
3465{
3466 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3467 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3468 if (IEM_IS_MODRM_REG_MODE(bRm))
3469 {
3470 /*
3471 * Register, register.
3472 */
3473 IEM_MC_BEGIN(0, 0, 0, 0);
3474 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3475 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3476 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3477 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3478 IEM_GET_MODRM_RM(pVCpu, bRm));
3479 IEM_MC_ADVANCE_RIP_AND_FINISH();
3480 IEM_MC_END();
3481 }
3482 else
3483 {
3484 /*
3485 * Register, memory.
3486 */
3487 IEM_MC_BEGIN(0, 2, 0, 0);
3488 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3489 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3490
3491 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3492 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3493 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3494 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3495
3496 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3497 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3498
3499 IEM_MC_ADVANCE_RIP_AND_FINISH();
3500 IEM_MC_END();
3501 }
3502}
3503
3504/* Opcode 0xf3 0x0f 0x28 - invalid */
3505/* Opcode 0xf2 0x0f 0x28 - invalid */
3506
3507/**
3508 * @opcode 0x29
3509 * @oppfx none
3510 * @opcpuid sse
3511 * @opgroup og_sse_simdfp_datamove
3512 * @opxcpttype 1
3513 * @optest op1=1 op2=2 -> op1=2
3514 * @optest op1=0 op2=-42 -> op1=-42
3515 */
3516FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
3517{
3518 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3519 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3520 if (IEM_IS_MODRM_REG_MODE(bRm))
3521 {
3522 /*
3523 * Register, register.
3524 */
3525 IEM_MC_BEGIN(0, 0, 0, 0);
3526 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3527 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3528 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3529 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3530 IEM_GET_MODRM_REG(pVCpu, bRm));
3531 IEM_MC_ADVANCE_RIP_AND_FINISH();
3532 IEM_MC_END();
3533 }
3534 else
3535 {
3536 /*
3537 * Memory, register.
3538 */
3539 IEM_MC_BEGIN(0, 2, 0, 0);
3540 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3541 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3542
3543 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3544 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3545 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3546 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3547
3548 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3549 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3550
3551 IEM_MC_ADVANCE_RIP_AND_FINISH();
3552 IEM_MC_END();
3553 }
3554}
3555
3556/**
3557 * @opcode 0x29
3558 * @oppfx 66
3559 * @opcpuid sse2
3560 * @opgroup og_sse2_pcksclr_datamove
3561 * @opxcpttype 1
3562 * @optest op1=1 op2=2 -> op1=2
3563 * @optest op1=0 op2=-42 -> op1=-42
3564 */
3565FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
3566{
3567 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3568 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3569 if (IEM_IS_MODRM_REG_MODE(bRm))
3570 {
3571 /*
3572 * Register, register.
3573 */
3574 IEM_MC_BEGIN(0, 0, 0, 0);
3575 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3576 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3577 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3578 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3579 IEM_GET_MODRM_REG(pVCpu, bRm));
3580 IEM_MC_ADVANCE_RIP_AND_FINISH();
3581 IEM_MC_END();
3582 }
3583 else
3584 {
3585 /*
3586 * Memory, register.
3587 */
3588 IEM_MC_BEGIN(0, 2, 0, 0);
3589 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3590 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3591
3592 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3593 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3594 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3595 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3596
3597 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3598 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3599
3600 IEM_MC_ADVANCE_RIP_AND_FINISH();
3601 IEM_MC_END();
3602 }
3603}
3604
3605/* Opcode 0xf3 0x0f 0x29 - invalid */
3606/* Opcode 0xf2 0x0f 0x29 - invalid */
3607
3608
3609/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
3610FNIEMOP_DEF(iemOp_cvtpi2ps_Vps_Qpi)
3611{
3612 IEMOP_MNEMONIC2(RM, CVTPI2PS, cvtpi2ps, Vps, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3613 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3614 if (IEM_IS_MODRM_REG_MODE(bRm))
3615 {
3616 /*
3617 * XMM, MMX
3618 */
3619 IEM_MC_BEGIN(3, 1, 0, 0);
3620 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3621 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3622 IEM_MC_LOCAL(X86XMMREG, Dst);
3623 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3624 IEM_MC_ARG(uint64_t, u64Src, 2);
3625 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3626 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3627 IEM_MC_PREPARE_FPU_USAGE();
3628 IEM_MC_FPU_TO_MMX_MODE();
3629
3630 IEM_MC_REF_MXCSR(pfMxcsr);
3631 IEM_MC_FETCH_XREG_XMM(Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* Need it because the high quadword remains unchanged. */
3632 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
3633
3634 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2ps_u128, pfMxcsr, pDst, u64Src);
3635 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3636 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3637 } IEM_MC_ELSE() {
3638 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3639 } IEM_MC_ENDIF();
3640
3641 IEM_MC_ADVANCE_RIP_AND_FINISH();
3642 IEM_MC_END();
3643 }
3644 else
3645 {
3646 /*
3647 * XMM, [mem64]
3648 */
3649 IEM_MC_BEGIN(3, 2, 0, 0);
3650 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3651 IEM_MC_LOCAL(X86XMMREG, Dst);
3652 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3653 IEM_MC_ARG(uint64_t, u64Src, 2);
3654 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3655
3656 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3657 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3658 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3659 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3660 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3661
3662 IEM_MC_PREPARE_FPU_USAGE();
3663 IEM_MC_FPU_TO_MMX_MODE();
3664 IEM_MC_REF_MXCSR(pfMxcsr);
3665
3666 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2ps_u128, pfMxcsr, pDst, u64Src);
3667 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3668 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3669 } IEM_MC_ELSE() {
3670 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3671 } IEM_MC_ENDIF();
3672
3673 IEM_MC_ADVANCE_RIP_AND_FINISH();
3674 IEM_MC_END();
3675 }
3676}
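
/*
 * For reference, the conversion performed by iemAImpl_cvtpi2ps_u128 boils
 * down to the sketch below: the two packed int32 values of the MMX source
 * become two singles in the low quadword of the destination while the high
 * quadword is preserved (hence the IEM_MC_FETCH_XREG_XMM above).
 * Illustrative C99 only (would need <stdint.h>); the real worker also
 * applies the MXCSR rounding mode and records exception flags:
 */
#if 0
static void cvtpi2psRef(float aDst[4], uint64_t u64Src)
{
    aDst[0] = (float)(int32_t)(u64Src & UINT32_MAX);    /* low dword  */
    aDst[1] = (float)(int32_t)(u64Src >> 32);           /* high dword */
    /* aDst[2] and aDst[3] are left as-is by the instruction. */
}
#endif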
3677
3678
3679/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
3680FNIEMOP_DEF(iemOp_cvtpi2pd_Vpd_Qpi)
3681{
3682 IEMOP_MNEMONIC2(RM, CVTPI2PD, cvtpi2pd, Vpd, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3683 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3684 if (IEM_IS_MODRM_REG_MODE(bRm))
3685 {
3686 /*
3687 * XMM, MMX
3688 */
3689 IEM_MC_BEGIN(3, 1, 0, 0);
3690 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3691 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3692 IEM_MC_LOCAL(X86XMMREG, Dst);
3693 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3694 IEM_MC_ARG(uint64_t, u64Src, 2);
3695 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3696 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3697 IEM_MC_PREPARE_FPU_USAGE();
3698 IEM_MC_FPU_TO_MMX_MODE();
3699
3700 IEM_MC_REF_MXCSR(pfMxcsr);
3701 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
3702
3703 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2pd_u128, pfMxcsr, pDst, u64Src);
3704 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3705 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3706 } IEM_MC_ELSE() {
3707 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3708 } IEM_MC_ENDIF();
3709
3710 IEM_MC_ADVANCE_RIP_AND_FINISH();
3711 IEM_MC_END();
3712 }
3713 else
3714 {
3715 /*
3716 * XMM, [mem64]
3717 */
3718 IEM_MC_BEGIN(3, 3, 0, 0);
3719 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3720 IEM_MC_LOCAL(X86XMMREG, Dst);
3721 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3722 IEM_MC_ARG(uint64_t, u64Src, 2);
3723 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3724
3725 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3726 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3727 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3728 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3729 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3730
3731 /* Doesn't cause a transition to MMX mode. */
3732 IEM_MC_PREPARE_SSE_USAGE();
3733 IEM_MC_REF_MXCSR(pfMxcsr);
3734
3735 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2pd_u128, pfMxcsr, pDst, u64Src);
3736 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3737 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3738 } IEM_MC_ELSE() {
3739 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3740 } IEM_MC_ENDIF();
3741
3742 IEM_MC_ADVANCE_RIP_AND_FINISH();
3743 IEM_MC_END();
3744 }
3745}
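
/*
 * A note on the IEM_MC_FPU_TO_MMX_MODE calls in this group: architecturally,
 * the MMX-register forms switch the x87 unit to MMX mode, i.e. roughly
 * FSW.TOP is cleared and all FTW tags are marked valid.  cvtpi2ps does this
 * for the memory form as well, whereas cvtpi2pd with a memory source touches
 * no MMX register and therefore skips the transition (see the comment in the
 * memory path above).
 */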
3746
3747
3748/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
3749FNIEMOP_DEF(iemOp_cvtsi2ss_Vss_Ey)
3750{
3751 IEMOP_MNEMONIC2(RM, CVTSI2SS, cvtsi2ss, Vss, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
3752
3753 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3754 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3755 {
3756 if (IEM_IS_MODRM_REG_MODE(bRm))
3757 {
3758 /* XMM, greg64 */
3759 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
3760 IEM_MC_LOCAL(uint32_t, fMxcsr);
3761 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3762 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3763 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3764 IEM_MC_ARG(const int64_t *, pi64Src, 2);
3765
3766 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3767 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3768 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3769
3770 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3771 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i64, pfMxcsr, pr32Dst, pi64Src);
3772 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3773 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3774 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3775 } IEM_MC_ELSE() {
3776 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3777 } IEM_MC_ENDIF();
3778
3779 IEM_MC_ADVANCE_RIP_AND_FINISH();
3780 IEM_MC_END();
3781 }
3782 else
3783 {
3784 /* XMM, [mem64] */
3785 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0);
3786 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3787 IEM_MC_LOCAL(uint32_t, fMxcsr);
3788 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3789 IEM_MC_LOCAL(int64_t, i64Src);
3790 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3791 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3792 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 2);
3793
3794 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3795 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3796 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3797 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3798
3799 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3800 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i64, pfMxcsr, pr32Dst, pi64Src);
3801 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3802 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3803 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3804 } IEM_MC_ELSE() {
3805 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3806 } IEM_MC_ENDIF();
3807
3808 IEM_MC_ADVANCE_RIP_AND_FINISH();
3809 IEM_MC_END();
3810 }
3811 }
3812 else
3813 {
3814 if (IEM_IS_MODRM_REG_MODE(bRm))
3815 {
3816 /* greg, XMM */
3817 IEM_MC_BEGIN(3, 2, 0, 0);
3818 IEM_MC_LOCAL(uint32_t, fMxcsr);
3819 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3820 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3821 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3822 IEM_MC_ARG(const int32_t *, pi32Src, 2);
3823
3824 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3825 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3826 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3827
3828 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3829 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i32, pfMxcsr, pr32Dst, pi32Src);
3830 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3831 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3832 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3833 } IEM_MC_ELSE() {
3834 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3835 } IEM_MC_ENDIF();
3836
3837 IEM_MC_ADVANCE_RIP_AND_FINISH();
3838 IEM_MC_END();
3839 }
3840 else
3841 {
3842 /* greg, [mem32] */
3843 IEM_MC_BEGIN(3, 4, 0, 0);
3844 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3845 IEM_MC_LOCAL(uint32_t, fMxcsr);
3846 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3847 IEM_MC_LOCAL(int32_t, i32Src);
3848 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3849 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3850 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 2);
3851
3852 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3853 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3854 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3855 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3856
3857 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3858 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i32, pfMxcsr, pr32Dst, pi32Src);
3859 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3860 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3861 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3862 } IEM_MC_ELSE() {
3863 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3864 } IEM_MC_ENDIF();
3865
3866 IEM_MC_ADVANCE_RIP_AND_FINISH();
3867 IEM_MC_END();
3868 }
3869 }
3870}
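
/*
 * Rounding sketch for the cvtsi2ss workers used above: the 64-bit (REX.W) or
 * 32-bit signed integer is converted to single precision, rounding per
 * MXCSR.RC whenever the value is not exactly representable, which is what
 * makes the MXCSR update and #XF check necessary.  A minimal C99
 * illustration assuming round-to-nearest-even, not the actual worker:
 */
#if 0
static float cvtsi2ssRef(int64_t i64Src)
{
    /* More than 24 significand bits get rounded, e.g. 16777217 -> 16777216.0f. */
    return (float)i64Src;
}
#endif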
3871
3872
3873/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
3874FNIEMOP_DEF(iemOp_cvtsi2sd_Vsd_Ey)
3875{
3876 IEMOP_MNEMONIC2(RM, CVTSI2SD, cvtsi2sd, Vsd, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
3877
3878 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3879 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3880 {
3881 if (IEM_IS_MODRM_REG_MODE(bRm))
3882 {
3883 /* XMM, greg64 */
3884 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
3885 IEM_MC_LOCAL(uint32_t, fMxcsr);
3886 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3887 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3888 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3889 IEM_MC_ARG(const int64_t *, pi64Src, 2);
3890
3891 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3892 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3893 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3894
3895 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3896 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i64, pfMxcsr, pr64Dst, pi64Src);
3897 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3898 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3899 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3900 } IEM_MC_ELSE() {
3901 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3902 } IEM_MC_ENDIF();
3903
3904 IEM_MC_ADVANCE_RIP_AND_FINISH();
3905 IEM_MC_END();
3906 }
3907 else
3908 {
3909 /* XMM, [mem64] */
3910 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0);
3911 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3912 IEM_MC_LOCAL(uint32_t, fMxcsr);
3913 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3914 IEM_MC_LOCAL(int64_t, i64Src);
3915 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3916 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3917 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 2);
3918
3919 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3920 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3921 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3922 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3923
3924 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3925 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i64, pfMxcsr, pr64Dst, pi64Src);
3926 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3927 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3928 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3929 } IEM_MC_ELSE() {
3930 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3931 } IEM_MC_ENDIF();
3932
3933 IEM_MC_ADVANCE_RIP_AND_FINISH();
3934 IEM_MC_END();
3935 }
3936 }
3937 else
3938 {
3939 if (IEM_IS_MODRM_REG_MODE(bRm))
3940 {
3941 /* XMM, greg32 */
3942 IEM_MC_BEGIN(3, 2, 0, 0);
3943 IEM_MC_LOCAL(uint32_t, fMxcsr);
3944 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3945 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3946 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3947 IEM_MC_ARG(const int32_t *, pi32Src, 2);
3948
3949 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3950 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3951 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3952
3953 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3954 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i32, pfMxcsr, pr64Dst, pi32Src);
3955 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3956 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3957 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3958 } IEM_MC_ELSE() {
3959 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3960 } IEM_MC_ENDIF();
3961
3962 IEM_MC_ADVANCE_RIP_AND_FINISH();
3963 IEM_MC_END();
3964 }
3965 else
3966 {
3967 /* XMM, [mem32] */
3968 IEM_MC_BEGIN(3, 4, 0, 0);
3969 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3970 IEM_MC_LOCAL(uint32_t, fMxcsr);
3971 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3972 IEM_MC_LOCAL(int32_t, i32Src);
3973 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3974 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3975 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 2);
3976
3977 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3978 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3979 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3980 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3981
3982 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3983 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i32, pfMxcsr, pr64Dst, pi32Src);
3984 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3985 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3986 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3987 } IEM_MC_ELSE() {
3988 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3989 } IEM_MC_ENDIF();
3990
3991 IEM_MC_ADVANCE_RIP_AND_FINISH();
3992 IEM_MC_END();
3993 }
3994 }
3995}
3996
3997
3998/**
3999 * @opcode 0x2b
4000 * @opcodesub !11 mr/reg
4001 * @oppfx none
4002 * @opcpuid sse
4003 * @opgroup og_sse1_cachect
4004 * @opxcpttype 1
4005 * @optest op1=1 op2=2 -> op1=2
4006 * @optest op1=0 op2=-42 -> op1=-42
4007 */
4008FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
4009{
4010 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4011 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4012 if (IEM_IS_MODRM_MEM_MODE(bRm))
4013 {
4014 /*
4015 * memory, register.
4016 */
4017 IEM_MC_BEGIN(0, 2, 0, 0);
4018 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
4019 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4020
4021 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4022 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4023 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4024 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4025
4026 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
4027 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4028
4029 IEM_MC_ADVANCE_RIP_AND_FINISH();
4030 IEM_MC_END();
4031 }
4032 /* The register, register encoding is invalid. */
4033 else
4034 IEMOP_RAISE_INVALID_OPCODE_RET();
4035}
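
/*
 * movntps is a non-temporal (cache-bypassing hint) store, which is why only
 * the memory destination form exists and the register form decodes to #UD.
 * IEM implements it as an ordinary aligned 128-bit store; the non-temporal
 * aspect is a performance hint without architectural effect on the value
 * stored.
 */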
4036
4037/**
4038 * @opcode 0x2b
4039 * @opcodesub !11 mr/reg
4040 * @oppfx 66
4041 * @opcpuid sse2
4042 * @opgroup og_sse2_cachect
4043 * @opxcpttype 1
4044 * @optest op1=1 op2=2 -> op1=2
4045 * @optest op1=0 op2=-42 -> op1=-42
4046 */
4047FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
4048{
4049 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4050 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4051 if (IEM_IS_MODRM_MEM_MODE(bRm))
4052 {
4053 /*
4054 * memory, register.
4055 */
4056 IEM_MC_BEGIN(0, 2, 0, 0);
4057 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
4058 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4059
4060 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4061 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4062 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4063 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4064
4065 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
4066 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4067
4068 IEM_MC_ADVANCE_RIP_AND_FINISH();
4069 IEM_MC_END();
4070 }
4071 /* The register, register encoding is invalid. */
4072 else
4073 IEMOP_RAISE_INVALID_OPCODE_RET();
4074}
4075/* Opcode 0xf3 0x0f 0x2b - invalid */
4076/* Opcode 0xf2 0x0f 0x2b - invalid */
4077
4078
4079/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
4080FNIEMOP_DEF(iemOp_cvttps2pi_Ppi_Wps)
4081{
4082 IEMOP_MNEMONIC2(RM, CVTTPS2PI, cvttps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4083 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4084 if (IEM_IS_MODRM_REG_MODE(bRm))
4085 {
4086 /*
4087 * Register, register.
4088 */
4089 IEM_MC_BEGIN(3, 1, 0, 0);
4090 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4091 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4092 IEM_MC_LOCAL(uint64_t, u64Dst);
4093 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4094 IEM_MC_ARG(uint64_t, u64Src, 2);
4095 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4096 IEM_MC_PREPARE_FPU_USAGE();
4097 IEM_MC_FPU_TO_MMX_MODE();
4098
4099 IEM_MC_REF_MXCSR(pfMxcsr);
4100 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
4101
4102 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4103 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4104 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4105 } IEM_MC_ELSE() {
4106 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4107 } IEM_MC_ENDIF();
4108
4109 IEM_MC_ADVANCE_RIP_AND_FINISH();
4110 IEM_MC_END();
4111 }
4112 else
4113 {
4114 /*
4115 * Register, memory.
4116 */
4117 IEM_MC_BEGIN(3, 2, 0, 0);
4118 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4119 IEM_MC_LOCAL(uint64_t, u64Dst);
4120 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4121 IEM_MC_ARG(uint64_t, u64Src, 2);
4122 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4123
4124 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4125 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4126 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4127 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4128
4129 IEM_MC_PREPARE_FPU_USAGE();
4130 IEM_MC_FPU_TO_MMX_MODE();
4131 IEM_MC_REF_MXCSR(pfMxcsr);
4132
4133 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4134 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4135 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4136 } IEM_MC_ELSE() {
4137 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4138 } IEM_MC_ENDIF();
4139
4140 IEM_MC_ADVANCE_RIP_AND_FINISH();
4141 IEM_MC_END();
4142 }
4143}
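
/*
 * Truncation sketch for iemAImpl_cvttps2pi_u128: the two packed singles in
 * the low source quadword convert to two int32 values with round-toward-zero,
 * NaN and out-of-range inputs yielding the integer indefinite value
 * 0x80000000 (plus #I in MXCSR).  Illustrative C99 only (would need
 * <stdint.h> and <string.h>), not the actual worker:
 */
#if 0
static uint64_t cvttps2piRef(uint64_t u64Src)
{
    float    aSrc[2];
    int32_t  aDst[2];
    uint64_t u64Dst;
    memcpy(aSrc, &u64Src, sizeof(aSrc));
    for (unsigned i = 0; i < 2; i++)
        if (aSrc[i] == aSrc[i] /* not NaN */ && aSrc[i] >= -2147483648.0f && aSrc[i] < 2147483648.0f)
            aDst[i] = (int32_t)aSrc[i];     /* C casts truncate toward zero. */
        else
            aDst[i] = INT32_MIN;            /* integer indefinite */
    memcpy(&u64Dst, aDst, sizeof(u64Dst));
    return u64Dst;
}
#endif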
4144
4145
4146/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
4147FNIEMOP_DEF(iemOp_cvttpd2pi_Ppi_Wpd)
4148{
4149 IEMOP_MNEMONIC2(RM, CVTTPD2PI, cvttpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4150 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4151 if (IEM_IS_MODRM_REG_MODE(bRm))
4152 {
4153 /*
4154 * Register, register.
4155 */
4156 IEM_MC_BEGIN(3, 1, 0, 0);
4157 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4158 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4159 IEM_MC_LOCAL(uint64_t, u64Dst);
4160 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4161 IEM_MC_ARG(PCX86XMMREG, pSrc, 2);
4162 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4163 IEM_MC_PREPARE_FPU_USAGE();
4164 IEM_MC_FPU_TO_MMX_MODE();
4165
4166 IEM_MC_REF_MXCSR(pfMxcsr);
4167 IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4168
4169 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4170 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4171 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4172 } IEM_MC_ELSE() {
4173 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4174 } IEM_MC_ENDIF();
4175
4176 IEM_MC_ADVANCE_RIP_AND_FINISH();
4177 IEM_MC_END();
4178 }
4179 else
4180 {
4181 /*
4182 * Register, memory.
4183 */
4184 IEM_MC_BEGIN(3, 3, 0, 0);
4185 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4186 IEM_MC_LOCAL(uint64_t, u64Dst);
4187 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4188 IEM_MC_LOCAL(X86XMMREG, uSrc);
4189 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 2);
4190 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4191
4192 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4193 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4194 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4195 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4196
4197 IEM_MC_PREPARE_FPU_USAGE();
4198 IEM_MC_FPU_TO_MMX_MODE();
4199
4200 IEM_MC_REF_MXCSR(pfMxcsr);
4201
4202 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4203 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4204 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4205 } IEM_MC_ELSE() {
4206 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4207 } IEM_MC_ENDIF();
4208
4209 IEM_MC_ADVANCE_RIP_AND_FINISH();
4210 IEM_MC_END();
4211 }
4212}
4213
4214
4215/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
4216FNIEMOP_DEF(iemOp_cvttss2si_Gy_Wss)
4217{
4218 IEMOP_MNEMONIC2(RM, CVTTSS2SI, cvttss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4219
4220 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4221 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4222 {
4223 if (IEM_IS_MODRM_REG_MODE(bRm))
4224 {
4225 /* greg64, XMM */
4226 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
4227 IEM_MC_LOCAL(uint32_t, fMxcsr);
4228 IEM_MC_LOCAL(int64_t, i64Dst);
4229 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4230 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4231 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4232
4233 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4234 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4235 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4236
4237 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4238 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4239 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4240 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4241 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4242 } IEM_MC_ELSE() {
4243 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4244 } IEM_MC_ENDIF();
4245
4246 IEM_MC_ADVANCE_RIP_AND_FINISH();
4247 IEM_MC_END();
4248 }
4249 else
4250 {
4251 /* greg64, [mem64] */
4252 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0);
4253 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4254 IEM_MC_LOCAL(uint32_t, fMxcsr);
4255 IEM_MC_LOCAL(int64_t, i64Dst);
4256 IEM_MC_LOCAL(uint32_t, u32Src);
4257 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4258 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4259 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4260
4261 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4262 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4263 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4264 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4265
4266 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4267 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4268 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4269 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4270 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4271 } IEM_MC_ELSE() {
4272 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4273 } IEM_MC_ENDIF();
4274
4275 IEM_MC_ADVANCE_RIP_AND_FINISH();
4276 IEM_MC_END();
4277 }
4278 }
4279 else
4280 {
4281 if (IEM_IS_MODRM_REG_MODE(bRm))
4282 {
4283 /* greg, XMM */
4284 IEM_MC_BEGIN(3, 2, 0, 0);
4285 IEM_MC_LOCAL(uint32_t, fMxcsr);
4286 IEM_MC_LOCAL(int32_t, i32Dst);
4287 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4288 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4289 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4290
4291 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4292 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4293 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4294
4295 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4296 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4297 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4298 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4299 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4300 } IEM_MC_ELSE() {
4301 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4302 } IEM_MC_ENDIF();
4303
4304 IEM_MC_ADVANCE_RIP_AND_FINISH();
4305 IEM_MC_END();
4306 }
4307 else
4308 {
4309 /* greg, [mem] */
4310 IEM_MC_BEGIN(3, 4, 0, 0);
4311 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4312 IEM_MC_LOCAL(uint32_t, fMxcsr);
4313 IEM_MC_LOCAL(int32_t, i32Dst);
4314 IEM_MC_LOCAL(uint32_t, u32Src);
4315 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4316 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4317 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4318
4319 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4320 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4321 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4322 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4323
4324 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4325 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4326 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4327 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4328 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4329 } IEM_MC_ELSE() {
4330 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4331 } IEM_MC_ENDIF();
4332
4333 IEM_MC_ADVANCE_RIP_AND_FINISH();
4334 IEM_MC_END();
4335 }
4336 }
4337}
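
/*
 * The REX.W test above selects between the i64 and i32 workers; both
 * truncate toward zero, and NaN or out-of-range inputs produce the integer
 * indefinite value together with MXCSR.IE.  Roughly:
 *
 *      cvttss2si eax, xmm0     ; 2.9f -> 2, -2.9f -> -2
 *      cvttss2si eax, xmm0     ; NaN  -> 0x80000000 (+ MXCSR.IE)
 *      cvttss2si rax, xmm0     ; 1e30 -> 0x8000000000000000 (+ MXCSR.IE)
 */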
4338
4339
4340/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
4341FNIEMOP_DEF(iemOp_cvttsd2si_Gy_Wsd)
4342{
4343 IEMOP_MNEMONIC2(RM, CVTTSD2SI, cvttsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4344
4345 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4346 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4347 {
4348 if (IEM_IS_MODRM_REG_MODE(bRm))
4349 {
4350 /* greg64, XMM */
4351 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
4352 IEM_MC_LOCAL(uint32_t, fMxcsr);
4353 IEM_MC_LOCAL(int64_t, i64Dst);
4354 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4355 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4356 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4357
4358 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4359 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4360 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4361
4362 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4363 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4364 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4365 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4366 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4367 } IEM_MC_ELSE() {
4368 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4369 } IEM_MC_ENDIF();
4370
4371 IEM_MC_ADVANCE_RIP_AND_FINISH();
4372 IEM_MC_END();
4373 }
4374 else
4375 {
4376 /* greg64, [mem64] */
4377 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0);
4378 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4379 IEM_MC_LOCAL(uint32_t, fMxcsr);
4380 IEM_MC_LOCAL(int64_t, i64Dst);
4381 IEM_MC_LOCAL(uint64_t, u64Src);
4382 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4383 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4384 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4385
4386 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4387 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4388 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4389 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4390
4391 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4392 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4393 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4394 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4395 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4396 } IEM_MC_ELSE() {
4397 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4398 } IEM_MC_ENDIF();
4399
4400 IEM_MC_ADVANCE_RIP_AND_FINISH();
4401 IEM_MC_END();
4402 }
4403 }
4404 else
4405 {
4406 if (IEM_IS_MODRM_REG_MODE(bRm))
4407 {
4408 /* greg, XMM */
4409 IEM_MC_BEGIN(3, 2, 0, 0);
4410 IEM_MC_LOCAL(uint32_t, fMxcsr);
4411 IEM_MC_LOCAL(int32_t, i32Dst);
4412 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4413 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4414 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4415
4416 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4417 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4418 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4419
4420 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4421 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4422 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4423 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4424 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4425 } IEM_MC_ELSE() {
4426 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4427 } IEM_MC_ENDIF();
4428
4429 IEM_MC_ADVANCE_RIP_AND_FINISH();
4430 IEM_MC_END();
4431 }
4432 else
4433 {
4434 /* greg32, [mem32] */
4435 IEM_MC_BEGIN(3, 4, 0, 0);
4436 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4437 IEM_MC_LOCAL(uint32_t, fMxcsr);
4438 IEM_MC_LOCAL(int32_t, i32Dst);
4439 IEM_MC_LOCAL(uint64_t, u64Src);
4440 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4441 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4442 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4443
4444 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4445 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4446 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4447 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4448
4449 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4450 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4451 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4452 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4453 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4454 } IEM_MC_ELSE() {
4455 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4456 } IEM_MC_ENDIF();
4457
4458 IEM_MC_ADVANCE_RIP_AND_FINISH();
4459 IEM_MC_END();
4460 }
4461 }
4462}
4463
4464
4465/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
4466FNIEMOP_DEF(iemOp_cvtps2pi_Ppi_Wps)
4467{
4468 IEMOP_MNEMONIC2(RM, CVTPS2PI, cvtps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4469 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4470 if (IEM_IS_MODRM_REG_MODE(bRm))
4471 {
4472 /*
4473 * Register, register.
4474 */
4475 IEM_MC_BEGIN(3, 1, 0, 0);
4476 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4477 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4478 IEM_MC_LOCAL(uint64_t, u64Dst);
4479 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4480 IEM_MC_ARG(uint64_t, u64Src, 2);
4481
4482 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4483 IEM_MC_PREPARE_FPU_USAGE();
4484 IEM_MC_FPU_TO_MMX_MODE();
4485
4486 IEM_MC_REF_MXCSR(pfMxcsr);
4487 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
4488
4489 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4490 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4491 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4492 } IEM_MC_ELSE() {
4493 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4494 } IEM_MC_ENDIF();
4495
4496 IEM_MC_ADVANCE_RIP_AND_FINISH();
4497 IEM_MC_END();
4498 }
4499 else
4500 {
4501 /*
4502 * Register, memory.
4503 */
4504 IEM_MC_BEGIN(3, 2, 0, 0);
4505 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4506 IEM_MC_LOCAL(uint64_t, u64Dst);
4507 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4508 IEM_MC_ARG(uint64_t, u64Src, 2);
4509 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4510
4511 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4512 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4513 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4514 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4515
4516 IEM_MC_PREPARE_FPU_USAGE();
4517 IEM_MC_FPU_TO_MMX_MODE();
4518 IEM_MC_REF_MXCSR(pfMxcsr);
4519
4520 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4521 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4522 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4523 } IEM_MC_ELSE() {
4524 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4525 } IEM_MC_ENDIF();
4526
4527 IEM_MC_ADVANCE_RIP_AND_FINISH();
4528 IEM_MC_END();
4529 }
4530}
4531
4532
4533/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Ppi, Wpd */
4534FNIEMOP_DEF(iemOp_cvtpd2pi_Qpi_Wpd)
4535{
4536 IEMOP_MNEMONIC2(RM, CVTPD2PI, cvtpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4537 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4538 if (IEM_IS_MODRM_REG_MODE(bRm))
4539 {
4540 /*
4541 * Register, register.
4542 */
4543 IEM_MC_BEGIN(3, 1, 0, 0);
4544 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4545 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4546 IEM_MC_LOCAL(uint64_t, u64Dst);
4547 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4548 IEM_MC_ARG(PCX86XMMREG, pSrc, 2);
4549
4550 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4551 IEM_MC_PREPARE_FPU_USAGE();
4552 IEM_MC_FPU_TO_MMX_MODE();
4553
4554 IEM_MC_REF_MXCSR(pfMxcsr);
4555 IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4556
4557 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4558 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4559 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4560 } IEM_MC_ELSE() {
4561 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4562 } IEM_MC_ENDIF();
4563
4564 IEM_MC_ADVANCE_RIP_AND_FINISH();
4565 IEM_MC_END();
4566 }
4567 else
4568 {
4569 /*
4570 * Register, memory.
4571 */
4572 IEM_MC_BEGIN(3, 3, 0, 0);
4573 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4574 IEM_MC_LOCAL(uint64_t, u64Dst);
4575 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4576 IEM_MC_LOCAL(X86XMMREG, uSrc);
4577 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 2);
4578 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4579
4580 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4581 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4582 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4583 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4584
4585 IEM_MC_PREPARE_FPU_USAGE();
4586 IEM_MC_FPU_TO_MMX_MODE();
4587
4588 IEM_MC_REF_MXCSR(pfMxcsr);
4589
4590 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4591 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4592 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4593 } IEM_MC_ELSE() {
4594 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4595 } IEM_MC_ENDIF();
4596
4597 IEM_MC_ADVANCE_RIP_AND_FINISH();
4598 IEM_MC_END();
4599 }
4600}
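
/*
 * Unlike the 64-bit memory fetch of cvtps2pi above, cvttpd2pi and cvtpd2pi
 * read a full 16-byte, alignment-checked source (IEM_MC_FETCH_MEM_XMM_ALIGN_SSE)
 * because both packed doubles are consumed, so a misaligned operand raises
 * #GP(0) here as well.
 */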
4601
4602
4603/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
4604FNIEMOP_DEF(iemOp_cvtss2si_Gy_Wss)
4605{
4606 IEMOP_MNEMONIC2(RM, CVTSS2SI, cvtss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4607
4608 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4609 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4610 {
4611 if (IEM_IS_MODRM_REG_MODE(bRm))
4612 {
4613 /* greg64, XMM */
4614 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
4615 IEM_MC_LOCAL(uint32_t, fMxcsr);
4616 IEM_MC_LOCAL(int64_t, i64Dst);
4617 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4618 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4619 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4620
4621 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4622 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4623 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4624
4625 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4626 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4627 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4628 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4629 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4630 } IEM_MC_ELSE() {
4631 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4632 } IEM_MC_ENDIF();
4633
4634 IEM_MC_ADVANCE_RIP_AND_FINISH();
4635 IEM_MC_END();
4636 }
4637 else
4638 {
4639 /* greg64, [mem64] */
4640 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0);
4641 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4642 IEM_MC_LOCAL(uint32_t, fMxcsr);
4643 IEM_MC_LOCAL(int64_t, i64Dst);
4644 IEM_MC_LOCAL(uint32_t, u32Src);
4645 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4646 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4647 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4648
4649 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4650 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4651 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4652 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4653
4654 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4655 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4656 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4657 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4658 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4659 } IEM_MC_ELSE() {
4660 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4661 } IEM_MC_ENDIF();
4662
4663 IEM_MC_ADVANCE_RIP_AND_FINISH();
4664 IEM_MC_END();
4665 }
4666 }
4667 else
4668 {
4669 if (IEM_IS_MODRM_REG_MODE(bRm))
4670 {
4671 /* greg, XMM */
4672 IEM_MC_BEGIN(3, 2, 0, 0);
4673 IEM_MC_LOCAL(uint32_t, fMxcsr);
4674 IEM_MC_LOCAL(int32_t, i32Dst);
4675 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4676 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4677 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4678
4679 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4680 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4681 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4682
4683 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4684 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4685 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4686 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4687 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4688 } IEM_MC_ELSE() {
4689 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4690 } IEM_MC_ENDIF();
4691
4692 IEM_MC_ADVANCE_RIP_AND_FINISH();
4693 IEM_MC_END();
4694 }
4695 else
4696 {
4697 /* greg, [mem] */
4698 IEM_MC_BEGIN(3, 4, 0, 0);
4699 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4700 IEM_MC_LOCAL(uint32_t, fMxcsr);
4701 IEM_MC_LOCAL(int32_t, i32Dst);
4702 IEM_MC_LOCAL(uint32_t, u32Src);
4703 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4704 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4705 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4706
4707 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4708 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4709 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4710 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4711
4712 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4713 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4714 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4715 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4716 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4717 } IEM_MC_ELSE() {
4718 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4719 } IEM_MC_ENDIF();
4720
4721 IEM_MC_ADVANCE_RIP_AND_FINISH();
4722 IEM_MC_END();
4723 }
4724 }
4725}
4726
4727
4728/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
4729FNIEMOP_DEF(iemOp_cvtsd2si_Gy_Wsd)
4730{
4731 IEMOP_MNEMONIC2(RM, CVTSD2SI, cvtsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4732
4733 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4734 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4735 {
4736 if (IEM_IS_MODRM_REG_MODE(bRm))
4737 {
4738 /* greg64, XMM */
4739 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
4740 IEM_MC_LOCAL(uint32_t, fMxcsr);
4741 IEM_MC_LOCAL(int64_t, i64Dst);
4742 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4743 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4744 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4745
4746 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4747 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4748 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4749
4750 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4751 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4752 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4753 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4754 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4755 } IEM_MC_ELSE() {
4756 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4757 } IEM_MC_ENDIF();
4758
4759 IEM_MC_ADVANCE_RIP_AND_FINISH();
4760 IEM_MC_END();
4761 }
4762 else
4763 {
4764 /* greg64, [mem64] */
4765 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0);
4766 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4767 IEM_MC_LOCAL(uint32_t, fMxcsr);
4768 IEM_MC_LOCAL(int64_t, i64Dst);
4769 IEM_MC_LOCAL(uint64_t, u64Src);
4770 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4771 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4772 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4773
4774 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4775 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4776 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4777 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4778
4779 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4780 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4781 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4782 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4783 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4784 } IEM_MC_ELSE() {
4785 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4786 } IEM_MC_ENDIF();
4787
4788 IEM_MC_ADVANCE_RIP_AND_FINISH();
4789 IEM_MC_END();
4790 }
4791 }
4792 else
4793 {
4794 if (IEM_IS_MODRM_REG_MODE(bRm))
4795 {
4796 /* greg32, XMM */
4797 IEM_MC_BEGIN(3, 2, 0, 0);
4798 IEM_MC_LOCAL(uint32_t, fMxcsr);
4799 IEM_MC_LOCAL(int32_t, i32Dst);
4800 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4801 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4802 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4803
4804 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4805 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4806 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4807
4808 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4809 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4810 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4811 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4812 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4813 } IEM_MC_ELSE() {
4814 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4815 } IEM_MC_ENDIF();
4816
4817 IEM_MC_ADVANCE_RIP_AND_FINISH();
4818 IEM_MC_END();
4819 }
4820 else
4821 {
4822 /* greg32, [mem64] */
4823 IEM_MC_BEGIN(3, 4, 0, 0);
4824 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4825 IEM_MC_LOCAL(uint32_t, fMxcsr);
4826 IEM_MC_LOCAL(int32_t, i32Dst);
4827 IEM_MC_LOCAL(uint64_t, u64Src);
4828 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4829 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4830 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4831
4832 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4833 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4834 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4835 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4836
4837 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4838 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4839 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4840 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4841 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4842 } IEM_MC_ELSE() {
4843 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4844 } IEM_MC_ENDIF();
4845
4846 IEM_MC_ADVANCE_RIP_AND_FINISH();
4847 IEM_MC_END();
4848 }
4849 }
4850}
4851
4852
4853/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
4854FNIEMOP_DEF(iemOp_ucomiss_Vss_Wss)
4855{
4856 IEMOP_MNEMONIC2(RM, UCOMISS, ucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4857 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4858 if (IEM_IS_MODRM_REG_MODE(bRm))
4859 {
4860 /*
4861 * Register, register.
4862 */
4863 IEM_MC_BEGIN(4, 1, 0, 0);
4864 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4865 IEM_MC_LOCAL(uint32_t, fEFlags);
4866 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4867 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4868 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4869 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
4870 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4871 IEM_MC_PREPARE_SSE_USAGE();
4872 IEM_MC_FETCH_EFLAGS(fEFlags);
4873 IEM_MC_REF_MXCSR(pfMxcsr);
4874 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4875 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
4876 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4877 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4878 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4879 } IEM_MC_ELSE() {
4880 IEM_MC_COMMIT_EFLAGS(fEFlags);
4881 } IEM_MC_ENDIF();
4882
4883 IEM_MC_ADVANCE_RIP_AND_FINISH();
4884 IEM_MC_END();
4885 }
4886 else
4887 {
4888 /*
4889 * Register, memory.
4890 */
4891 IEM_MC_BEGIN(4, 3, 0, 0);
4892 IEM_MC_LOCAL(uint32_t, fEFlags);
4893 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4894 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4895 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4896 IEM_MC_LOCAL(X86XMMREG, uSrc2);
4897 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
4898 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4899
4900 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4901 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4902 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4903 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4904
4905 IEM_MC_PREPARE_SSE_USAGE();
4906 IEM_MC_FETCH_EFLAGS(fEFlags);
4907 IEM_MC_REF_MXCSR(pfMxcsr);
4908 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4909 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4910 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4911 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4912 } IEM_MC_ELSE() {
4913 IEM_MC_COMMIT_EFLAGS(fEFlags);
4914 } IEM_MC_ENDIF();
4915
4916 IEM_MC_ADVANCE_RIP_AND_FINISH();
4917 IEM_MC_END();
4918 }
4919}
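
/*
 * EFLAGS mapping produced by the iemAImpl_ucomiss_u128 worker, sketched in
 * plain C for reference (illustrative only; the real worker also updates
 * MXCSR and handles the denormal/invalid details):
 */
#if 0
static uint32_t ucomissRefEFlags(float r32Src1, float r32Src2, uint32_t fEFlags)
{
    fEFlags &= ~(X86_EFL_ZF | X86_EFL_PF | X86_EFL_CF | X86_EFL_OF | X86_EFL_SF | X86_EFL_AF);
    if (r32Src1 != r32Src1 || r32Src2 != r32Src2)       /* unordered (NaN operand) */
        fEFlags |= X86_EFL_ZF | X86_EFL_PF | X86_EFL_CF;
    else if (r32Src1 < r32Src2)
        fEFlags |= X86_EFL_CF;
    else if (r32Src1 == r32Src2)
        fEFlags |= X86_EFL_ZF;
    /* greater-than leaves ZF=PF=CF=0. */
    return fEFlags;
}
#endif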
4920
4921
4922/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
4923FNIEMOP_DEF(iemOp_ucomisd_Vsd_Wsd)
4924{
4925 IEMOP_MNEMONIC2(RM, UCOMISD, ucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4926 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4927 if (IEM_IS_MODRM_REG_MODE(bRm))
4928 {
4929 /*
4930 * Register, register.
4931 */
4932 IEM_MC_BEGIN(4, 1, 0, 0);
4933 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4934 IEM_MC_LOCAL(uint32_t, fEFlags);
4935 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4936 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4937 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4938 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
4939 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4940 IEM_MC_PREPARE_SSE_USAGE();
4941 IEM_MC_FETCH_EFLAGS(fEFlags);
4942 IEM_MC_REF_MXCSR(pfMxcsr);
4943 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4944 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
4945 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4946 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4947 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4948 } IEM_MC_ELSE() {
4949 IEM_MC_COMMIT_EFLAGS(fEFlags);
4950 } IEM_MC_ENDIF();
4951
4952 IEM_MC_ADVANCE_RIP_AND_FINISH();
4953 IEM_MC_END();
4954 }
4955 else
4956 {
4957 /*
4958 * Register, memory.
4959 */
4960 IEM_MC_BEGIN(4, 3, 0, 0);
4961 IEM_MC_LOCAL(uint32_t, fEFlags);
4962 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4963 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4964 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4965 IEM_MC_LOCAL(X86XMMREG, uSrc2);
4966 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
4967 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4968
4969 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4970 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4971 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4972 IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4973
4974 IEM_MC_PREPARE_SSE_USAGE();
4975 IEM_MC_FETCH_EFLAGS(fEFlags);
4976 IEM_MC_REF_MXCSR(pfMxcsr);
4977 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4978 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4979 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4980 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4981 } IEM_MC_ELSE() {
4982 IEM_MC_COMMIT_EFLAGS(fEFlags);
4983 } IEM_MC_ENDIF();
4984
4985 IEM_MC_ADVANCE_RIP_AND_FINISH();
4986 IEM_MC_END();
4987 }
4988}
4989
4990
4991/* Opcode 0xf3 0x0f 0x2e - invalid */
4992/* Opcode 0xf2 0x0f 0x2e - invalid */
4993
4994
4995/** Opcode 0x0f 0x2f - comiss Vss, Wss */
4996FNIEMOP_DEF(iemOp_comiss_Vss_Wss)
4997{
4998 IEMOP_MNEMONIC2(RM, COMISS, comiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4999 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5000 if (IEM_IS_MODRM_REG_MODE(bRm))
5001 {
5002 /*
5003 * Register, register.
5004 */
5005 IEM_MC_BEGIN(4, 1, 0, 0);
5006 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
5007 IEM_MC_LOCAL(uint32_t, fEFlags);
5008 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5009 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5010 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5011 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
5012 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5013 IEM_MC_PREPARE_SSE_USAGE();
5014 IEM_MC_FETCH_EFLAGS(fEFlags);
5015 IEM_MC_REF_MXCSR(pfMxcsr);
5016 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5017 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
5018 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5019 IEM_MC_IF_MXCSR_XCPT_PENDING() {
5020 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5021 } IEM_MC_ELSE() {
5022 IEM_MC_COMMIT_EFLAGS(fEFlags);
5023 } IEM_MC_ENDIF();
5024
5025 IEM_MC_ADVANCE_RIP_AND_FINISH();
5026 IEM_MC_END();
5027 }
5028 else
5029 {
5030 /*
5031 * Register, memory.
5032 */
5033 IEM_MC_BEGIN(4, 3, 0, 0);
5034 IEM_MC_LOCAL(uint32_t, fEFlags);
5035 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5036 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5037 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5038 IEM_MC_LOCAL(X86XMMREG, uSrc2);
5039 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
5040 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5041
5042 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5043 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
5044 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5045 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5046
5047 IEM_MC_PREPARE_SSE_USAGE();
5048 IEM_MC_FETCH_EFLAGS(fEFlags);
5049 IEM_MC_REF_MXCSR(pfMxcsr);
5050 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5051 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5052 IEM_MC_IF_MXCSR_XCPT_PENDING() {
5053 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5054 } IEM_MC_ELSE() {
5055 IEM_MC_COMMIT_EFLAGS(fEFlags);
5056 } IEM_MC_ENDIF();
5057
5058 IEM_MC_ADVANCE_RIP_AND_FINISH();
5059 IEM_MC_END();
5060 }
5061}
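
/*
 * comiss/comisd compute the same EFLAGS mapping as ucomiss/ucomisd above;
 * the difference is the invalid-operation behaviour: the ordered compares
 * signal #I on any NaN operand (QNaNs included) while the unordered ones
 * only do so for SNaNs, which is essentially why separate iemAImpl_*
 * workers exist.
 */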
5062
5063
5064/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
5065FNIEMOP_DEF(iemOp_comisd_Vsd_Wsd)
5066{
5067 IEMOP_MNEMONIC2(RM, COMISD, comisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
5068 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5069 if (IEM_IS_MODRM_REG_MODE(bRm))
5070 {
5071 /*
5072 * Register, register.
5073 */
5074 IEM_MC_BEGIN(4, 1, 0, 0);
5075 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5076 IEM_MC_LOCAL(uint32_t, fEFlags);
5077 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5078 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5079 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5080 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
5081 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5082 IEM_MC_PREPARE_SSE_USAGE();
5083 IEM_MC_FETCH_EFLAGS(fEFlags);
5084 IEM_MC_REF_MXCSR(pfMxcsr);
5085 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5086 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
5087 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5088 IEM_MC_IF_MXCSR_XCPT_PENDING() {
5089 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5090 } IEM_MC_ELSE() {
5091 IEM_MC_COMMIT_EFLAGS(fEFlags);
5092 } IEM_MC_ENDIF();
5093
5094 IEM_MC_ADVANCE_RIP_AND_FINISH();
5095 IEM_MC_END();
5096 }
5097 else
5098 {
5099 /*
5100 * Register, memory.
5101 */
5102 IEM_MC_BEGIN(4, 3, 0, 0);
5103 IEM_MC_LOCAL(uint32_t, fEFlags);
5104 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5105 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5106 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5107 IEM_MC_LOCAL(X86XMMREG, uSrc2);
5108 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
5109 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5110
5111 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5113 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5114 IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5115
5116 IEM_MC_PREPARE_SSE_USAGE();
5117 IEM_MC_FETCH_EFLAGS(fEFlags);
5118 IEM_MC_REF_MXCSR(pfMxcsr);
5119 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5120 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5121 IEM_MC_IF_MXCSR_XCPT_PENDING() {
5122 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5123 } IEM_MC_ELSE() {
5124 IEM_MC_COMMIT_EFLAGS(fEFlags);
5125 } IEM_MC_ENDIF();
5126
5127 IEM_MC_ADVANCE_RIP_AND_FINISH();
5128 IEM_MC_END();
5129 }
5130}
5131
5132
5133/* Opcode 0xf3 0x0f 0x2f - invalid */
5134/* Opcode 0xf2 0x0f 0x2f - invalid */
5135
5136/** Opcode 0x0f 0x30. */
5137FNIEMOP_DEF(iemOp_wrmsr)
5138{
5139 IEMOP_MNEMONIC(wrmsr, "wrmsr");
5140 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5141 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_wrmsr);
5142}
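
/*
 * These MSR/TSC/PMC instructions are too involved for inline microcode and
 * defer to C implementations via IEM_MC_DEFER_TO_CIMPL_0_RET; the
 * IEM_CIMPL_F_VMEXIT flag tells the callers (including the recompilers) that
 * the C implementation may trigger a VM exit.
 */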
5143
5144
5145/** Opcode 0x0f 0x31. */
5146FNIEMOP_DEF(iemOp_rdtsc)
5147{
5148 IEMOP_MNEMONIC(rdtsc, "rdtsc");
5149 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5150 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_rdtsc);
5151}
5152
5153
5154/** Opcode 0x0f 0x32. */
5155FNIEMOP_DEF(iemOp_rdmsr)
5156{
5157 IEMOP_MNEMONIC(rdmsr, "rdmsr");
5158 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5159 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_rdmsr);
5160}
5161
5162
5163/** Opcode 0x0f 0x33. */
5164FNIEMOP_DEF(iemOp_rdpmc)
5165{
5166 IEMOP_MNEMONIC(rdpmc, "rdpmc");
5167 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5168 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_rdpmc);
5169}
5170
5171
5172/** Opcode 0x0f 0x34. */
5173FNIEMOP_DEF(iemOp_sysenter)
5174{
5175 IEMOP_MNEMONIC0(FIXED, SYSENTER, sysenter, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
5176 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5177 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
5178 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
5179 iemCImpl_sysenter);
5180}
5181
5182/** Opcode 0x0f 0x35. */
5183FNIEMOP_DEF(iemOp_sysexit)
5184{
5185 IEMOP_MNEMONIC0(FIXED, SYSEXIT, sysexit, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
5186 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5187 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
5188 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
5189 iemCImpl_sysexit, pVCpu->iem.s.enmEffOpSize);
5190}
5191
5192/** Opcode 0x0f 0x37. */
5193FNIEMOP_STUB(iemOp_getsec);
5194
5195
5196/** Opcode 0x0f 0x38. */
5197FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
5198{
5199#ifdef IEM_WITH_THREE_0F_38
5200 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
5201 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
5202#else
5203 IEMOP_BITCH_ABOUT_STUB();
5204 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5205#endif
5206}
5207
5208
5209/** Opcode 0x0f 0x3a. */
5210FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
5211{
5212#ifdef IEM_WITH_THREE_0F_3A
5213 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
5214 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
5215#else
5216 IEMOP_BITCH_ABOUT_STUB();
5217 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5218#endif
5219}
5220
5221
5222/**
5223 * Implements a conditional move.
5224 *
5225 * Wish there was an obvious way to do this where we could share and reduce
5226 * code bloat.
5227 *
5228 * @param a_Cnd The conditional "microcode" operation.
5229 */
5230#define CMOV_X(a_Cnd) \
5231 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
5232 if (IEM_IS_MODRM_REG_MODE(bRm)) \
5233 { \
5234 switch (pVCpu->iem.s.enmEffOpSize) \
5235 { \
5236 case IEMMODE_16BIT: \
5237 IEM_MC_BEGIN(0, 1, 0, 0); \
5238 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5239 IEM_MC_LOCAL(uint16_t, u16Tmp); \
5240 a_Cnd { \
5241 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5242 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
5243 } IEM_MC_ENDIF(); \
5244 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5245 IEM_MC_END(); \
5246 break; \
5247 \
5248 case IEMMODE_32BIT: \
5249 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0); \
5250 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5251 IEM_MC_LOCAL(uint32_t, u32Tmp); \
5252 a_Cnd { \
5253 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5254 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
5255 } IEM_MC_ELSE() { \
5256 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
5257 } IEM_MC_ENDIF(); \
5258 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5259 IEM_MC_END(); \
5260 break; \
5261 \
5262 case IEMMODE_64BIT: \
5263 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0); \
5264 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5265 IEM_MC_LOCAL(uint64_t, u64Tmp); \
5266 a_Cnd { \
5267 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5268 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
5269 } IEM_MC_ENDIF(); \
5270 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5271 IEM_MC_END(); \
5272 break; \
5273 \
5274 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5275 } \
5276 } \
5277 else \
5278 { \
5279 switch (pVCpu->iem.s.enmEffOpSize) \
5280 { \
5281 case IEMMODE_16BIT: \
5282 IEM_MC_BEGIN(0, 2, 0, 0); \
5283 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5284 IEM_MC_LOCAL(uint16_t, u16Tmp); \
5285 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5286 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5287 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5288 a_Cnd { \
5289 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
5290 } IEM_MC_ENDIF(); \
5291 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5292 IEM_MC_END(); \
5293 break; \
5294 \
5295 case IEMMODE_32BIT: \
5296 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0); \
5297 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5298 IEM_MC_LOCAL(uint32_t, u32Tmp); \
5299 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5300 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5301 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5302 a_Cnd { \
5303 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
5304 } IEM_MC_ELSE() { \
5305 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
5306 } IEM_MC_ENDIF(); \
5307 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5308 IEM_MC_END(); \
5309 break; \
5310 \
5311 case IEMMODE_64BIT: \
5312 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0); \
5313 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5314 IEM_MC_LOCAL(uint64_t, u64Tmp); \
5315 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5316 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5317 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5318 a_Cnd { \
5319 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
5320 } IEM_MC_ENDIF(); \
5321 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5322 IEM_MC_END(); \
5323 break; \
5324 \
5325 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5326 } \
5327 } do {} while (0)
5328
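/*
 * Illustrative sketch, not built: the 32-bit corner case the IEM_MC_ELSE
 * branches in CMOV_X handle.  Even when the condition is false, a 32-bit
 * CMOVcc zero-extends the destination into the full 64-bit register,
 * hence the IEM_MC_CLEAR_HIGH_GREG_U64 on the false path.
 * iemSketchCmov32 is a hypothetical stand-alone model of that rule.
 */
#if 0
static uint64_t iemSketchCmov32(uint64_t uDst64, uint32_t uSrc32, bool fCondition)
{
    if (fCondition)
        return uSrc32;              /* move, implicitly zero extended */
    return uDst64 & UINT32_MAX;     /* no move, but the high half is still cleared */
}
#endif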
5329
5330
5331/** Opcode 0x0f 0x40. */
5332FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
5333{
5334 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
5335 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
5336}
5337
5338
5339/** Opcode 0x0f 0x41. */
5340FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
5341{
5342 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
5343 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
5344}
5345
5346
5347/** Opcode 0x0f 0x42. */
5348FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
5349{
5350 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
5351 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
5352}
5353
5354
5355/** Opcode 0x0f 0x43. */
5356FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
5357{
5358 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
5359 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
5360}
5361
5362
5363/** Opcode 0x0f 0x44. */
5364FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
5365{
5366 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
5367 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
5368}
5369
5370
5371/** Opcode 0x0f 0x45. */
5372FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
5373{
5374 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
5375 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
5376}
5377
5378
5379/** Opcode 0x0f 0x46. */
5380FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
5381{
5382 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
5383 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
5384}
5385
5386
5387/** Opcode 0x0f 0x47. */
5388FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
5389{
5390 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
5391 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
5392}
5393
5394
5395/** Opcode 0x0f 0x48. */
5396FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
5397{
5398 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
5399 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
5400}
5401
5402
5403/** Opcode 0x0f 0x49. */
5404FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
5405{
5406 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
5407 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
5408}
5409
5410
5411/** Opcode 0x0f 0x4a. */
5412FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
5413{
5414 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
5415 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
5416}
5417
5418
5419/** Opcode 0x0f 0x4b. */
5420FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
5421{
5422 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
5423 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
5424}
5425
5426
5427/** Opcode 0x0f 0x4c. */
5428FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
5429{
5430 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
5431 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
5432}
5433
5434
5435/** Opcode 0x0f 0x4d. */
5436FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
5437{
5438 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
5439 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
5440}
5441
5442
5443/** Opcode 0x0f 0x4e. */
5444FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
5445{
5446 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
5447 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5448}
5449
5450
5451/** Opcode 0x0f 0x4f. */
5452FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
5453{
5454 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
5455 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5456}
5457
5458#undef CMOV_X
5459
5460/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
5461FNIEMOP_DEF(iemOp_movmskps_Gy_Ups)
5462{
5463 IEMOP_MNEMONIC2(RM_REG, MOVMSKPS, movmskps, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /** @todo */
5464 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5465 if (IEM_IS_MODRM_REG_MODE(bRm))
5466 {
5467 /*
5468 * Register, register.
5469 */
5470 IEM_MC_BEGIN(2, 1, 0, 0);
5471 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
5472 IEM_MC_LOCAL(uint8_t, u8Dst);
5473 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5474 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5475 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5476 IEM_MC_PREPARE_SSE_USAGE();
5477 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5478 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskps_u128, pu8Dst, puSrc);
5479 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5480 IEM_MC_ADVANCE_RIP_AND_FINISH();
5481 IEM_MC_END();
5482 }
5483 /* No memory operand. */
5484 else
5485 IEMOP_RAISE_INVALID_OPCODE_RET();
5486}
5487
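/*
 * Illustrative sketch, not built: what the movmskps worker computes --
 * the sign bit of each of the four packed singles, packed into bits 0..3
 * of the destination with the remaining bits cleared.  iemSketchMovMskPs
 * is a hypothetical helper, not part of IEM.
 */
#if 0
static uint8_t iemSketchMovMskPs(PCRTUINT128U puSrc)
{
    uint8_t bMask = 0;
    for (unsigned i = 0; i < 4; i++)
        bMask |= (uint8_t)((puSrc->au32[i] >> 31) << i);
    return bMask;
}
#endif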
5488
5489/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
5490FNIEMOP_DEF(iemOp_movmskpd_Gy_Upd)
5491{
5492 IEMOP_MNEMONIC2(RM_REG, MOVMSKPD, movmskpd, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /** @todo */
5493 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5494 if (IEM_IS_MODRM_REG_MODE(bRm))
5495 {
5496 /*
5497 * Register, register.
5498 */
5499 IEM_MC_BEGIN(2, 1, 0, 0);
5500 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5501 IEM_MC_LOCAL(uint8_t, u8Dst);
5502 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5503 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5504 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5505 IEM_MC_PREPARE_SSE_USAGE();
5506 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5507 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskpd_u128, pu8Dst, puSrc);
5508 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5509 IEM_MC_ADVANCE_RIP_AND_FINISH();
5510 IEM_MC_END();
5511 }
5512 /* No memory operand. */
5513 else
5514 IEMOP_RAISE_INVALID_OPCODE_RET();
5516}
5517
5518
5519/* Opcode 0xf3 0x0f 0x50 - invalid */
5520/* Opcode 0xf2 0x0f 0x50 - invalid */
5521
5522
5523/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
5524FNIEMOP_DEF(iemOp_sqrtps_Vps_Wps)
5525{
5526 IEMOP_MNEMONIC2(RM, SQRTPS, sqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5527 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_sqrtps_u128);
5528}
5529
5530
5531/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
5532FNIEMOP_DEF(iemOp_sqrtpd_Vpd_Wpd)
5533{
5534 IEMOP_MNEMONIC2(RM, SQRTPD, sqrtpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5535 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_sqrtpd_u128);
5536}
5537
5538
5539/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
5540FNIEMOP_DEF(iemOp_sqrtss_Vss_Wss)
5541{
5542 IEMOP_MNEMONIC2(RM, SQRTSS, sqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5543 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_sqrtss_u128_r32);
5544}
5545
5546
5547/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
5548FNIEMOP_DEF(iemOp_sqrtsd_Vsd_Wsd)
5549{
5550 IEMOP_MNEMONIC2(RM, SQRTSD, sqrtsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5551 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_sqrtsd_u128_r64);
5552}
5553
5554
5555/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
5556FNIEMOP_DEF(iemOp_rsqrtps_Vps_Wps)
5557{
5558 IEMOP_MNEMONIC2(RM, RSQRTPS, rsqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5559 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_rsqrtps_u128);
5560}
5561
5562
5563/* Opcode 0x66 0x0f 0x52 - invalid */
5564
5565
5566/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
5567FNIEMOP_DEF(iemOp_rsqrtss_Vss_Wss)
5568{
5569 IEMOP_MNEMONIC2(RM, RSQRTSS, rsqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5570 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_rsqrtss_u128_r32);
5571}
5572
5573
5574/* Opcode 0xf2 0x0f 0x52 - invalid */
5575
5576/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
5577FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
5578/* Opcode 0x66 0x0f 0x53 - invalid */
5579/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
5580FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
5581/* Opcode 0xf2 0x0f 0x53 - invalid */
5582
5583
5584/** Opcode 0x0f 0x54 - andps Vps, Wps */
5585FNIEMOP_DEF(iemOp_andps_Vps_Wps)
5586{
5587 IEMOP_MNEMONIC2(RM, ANDPS, andps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5588 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pand_u128);
5589}
5590
5591
5592/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
5593FNIEMOP_DEF(iemOp_andpd_Vpd_Wpd)
5594{
5595 IEMOP_MNEMONIC2(RM, ANDPD, andpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5596 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
5597}
5598
5599
5600/* Opcode 0xf3 0x0f 0x54 - invalid */
5601/* Opcode 0xf2 0x0f 0x54 - invalid */
5602
5603
5604/** Opcode 0x0f 0x55 - andnps Vps, Wps */
5605FNIEMOP_DEF(iemOp_andnps_Vps_Wps)
5606{
5607 IEMOP_MNEMONIC2(RM, ANDNPS, andnps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5608 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pandn_u128);
5609}
5610
5611
5612/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
5613FNIEMOP_DEF(iemOp_andnpd_Vpd_Wpd)
5614{
5615 IEMOP_MNEMONIC2(RM, ANDNPD, andnpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5616 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
5617}
5618
5619
5620/* Opcode 0xf3 0x0f 0x55 - invalid */
5621/* Opcode 0xf2 0x0f 0x55 - invalid */
5622
5623
5624/** Opcode 0x0f 0x56 - orps Vps, Wps */
5625FNIEMOP_DEF(iemOp_orps_Vps_Wps)
5626{
5627 IEMOP_MNEMONIC2(RM, ORPS, orps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5628 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_por_u128);
5629}
5630
5631
5632/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
5633FNIEMOP_DEF(iemOp_orpd_Vpd_Wpd)
5634{
5635 IEMOP_MNEMONIC2(RM, ORPD, orpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5636 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
5637}
5638
5639
5640/* Opcode 0xf3 0x0f 0x56 - invalid */
5641/* Opcode 0xf2 0x0f 0x56 - invalid */
5642
5643
5644/** Opcode 0x0f 0x57 - xorps Vps, Wps */
5645FNIEMOP_DEF(iemOp_xorps_Vps_Wps)
5646{
5647 IEMOP_MNEMONIC2(RM, XORPS, xorps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5648 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pxor_u128);
5649}
5650
5651
5652/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
5653FNIEMOP_DEF(iemOp_xorpd_Vpd_Wpd)
5654{
5655 IEMOP_MNEMONIC2(RM, XORPD, xorpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5656 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
5657}
5658
5659
5660/* Opcode 0xf3 0x0f 0x57 - invalid */
5661/* Opcode 0xf2 0x0f 0x57 - invalid */
5662
5663/** Opcode 0x0f 0x58 - addps Vps, Wps */
5664FNIEMOP_DEF(iemOp_addps_Vps_Wps)
5665{
5666 IEMOP_MNEMONIC2(RM, ADDPS, addps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5667 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
5668}
5669
5670
5671/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
5672FNIEMOP_DEF(iemOp_addpd_Vpd_Wpd)
5673{
5674 IEMOP_MNEMONIC2(RM, ADDPD, addpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5675 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_addpd_u128);
5676}
5677
5678
5679/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
5680FNIEMOP_DEF(iemOp_addss_Vss_Wss)
5681{
5682 IEMOP_MNEMONIC2(RM, ADDSS, addss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5683 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_addss_u128_r32);
5684}
5685
5686
5687/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
5688FNIEMOP_DEF(iemOp_addsd_Vsd_Wsd)
5689{
5690 IEMOP_MNEMONIC2(RM, ADDSD, addsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5691 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_addsd_u128_r64);
5692}
5693
5694
5695/** Opcode 0x0f 0x59 - mulps Vps, Wps */
5696FNIEMOP_DEF(iemOp_mulps_Vps_Wps)
5697{
5698 IEMOP_MNEMONIC2(RM, MULPS, mulps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5699 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_mulps_u128);
5700}
5701
5702
5703/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
5704FNIEMOP_DEF(iemOp_mulpd_Vpd_Wpd)
5705{
5706 IEMOP_MNEMONIC2(RM, MULPD, mulpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5707 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_mulpd_u128);
5708}
5709
5710
5711/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
5712FNIEMOP_DEF(iemOp_mulss_Vss_Wss)
5713{
5714 IEMOP_MNEMONIC2(RM, MULSS, mulss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5715 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_mulss_u128_r32);
5716}
5717
5718
5719/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
5720FNIEMOP_DEF(iemOp_mulsd_Vsd_Wsd)
5721{
5722 IEMOP_MNEMONIC2(RM, MULSD, mulsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5723 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_mulsd_u128_r64);
5724}
5725
5726
5727/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
5728FNIEMOP_DEF(iemOp_cvtps2pd_Vpd_Wps)
5729{
5730 IEMOP_MNEMONIC2(RM, CVTPS2PD, cvtps2pd, Vpd, Wps, DISOPTYPE_HARMLESS, 0);
5731 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2pd_u128);
5732}
5733
5734
5735/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
5736FNIEMOP_DEF(iemOp_cvtpd2ps_Vps_Wpd)
5737{
5738 IEMOP_MNEMONIC2(RM, CVTPD2PS, cvtpd2ps, Vps, Wpd, DISOPTYPE_HARMLESS, 0);
5739 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2ps_u128);
5740}
5741
5742
5743/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
5744FNIEMOP_DEF(iemOp_cvtss2sd_Vsd_Wss)
5745{
5746 IEMOP_MNEMONIC2(RM, CVTSS2SD, cvtss2sd, Vsd, Wss, DISOPTYPE_HARMLESS, 0);
5747 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_cvtss2sd_u128_r32);
5748}
5749
5750
5751/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
5752FNIEMOP_DEF(iemOp_cvtsd2ss_Vss_Wsd)
5753{
5754 IEMOP_MNEMONIC2(RM, CVTSD2SS, cvtsd2ss, Vss, Wsd, DISOPTYPE_HARMLESS, 0);
5755 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_cvtsd2ss_u128_r64);
5756}
5757
5758
5759/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
5760FNIEMOP_DEF(iemOp_cvtdq2ps_Vps_Wdq)
5761{
5762 IEMOP_MNEMONIC2(RM, CVTDQ2PS, cvtdq2ps, Vps, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5763 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2ps_u128);
5764}
5765
5766
5767/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
5768FNIEMOP_DEF(iemOp_cvtps2dq_Vdq_Wps)
5769{
5770 IEMOP_MNEMONIC2(RM, CVTPS2DQ, cvtps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5771 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2dq_u128);
5772}
5773
5774
5775/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
5776FNIEMOP_DEF(iemOp_cvttps2dq_Vdq_Wps)
5777{
5778 IEMOP_MNEMONIC2(RM, CVTTPS2DQ, cvttps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5779 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttps2dq_u128);
5780}
5781
5782
5783/* Opcode 0xf2 0x0f 0x5b - invalid */
5784
5785
5786/** Opcode 0x0f 0x5c - subps Vps, Wps */
5787FNIEMOP_DEF(iemOp_subps_Vps_Wps)
5788{
5789 IEMOP_MNEMONIC2(RM, SUBPS, subps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5790 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_subps_u128);
5791}
5792
5793
5794/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
5795FNIEMOP_DEF(iemOp_subpd_Vpd_Wpd)
5796{
5797 IEMOP_MNEMONIC2(RM, SUBPD, subpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5798 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_subpd_u128);
5799}
5800
5801
5802/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
5803FNIEMOP_DEF(iemOp_subss_Vss_Wss)
5804{
5805 IEMOP_MNEMONIC2(RM, SUBSS, subss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5806 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_subss_u128_r32);
5807}
5808
5809
5810/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
5811FNIEMOP_DEF(iemOp_subsd_Vsd_Wsd)
5812{
5813 IEMOP_MNEMONIC2(RM, SUBSD, subsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5814 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_subsd_u128_r64);
5815}
5816
5817
5818/** Opcode 0x0f 0x5d - minps Vps, Wps */
5819FNIEMOP_DEF(iemOp_minps_Vps_Wps)
5820{
5821 IEMOP_MNEMONIC2(RM, MINPS, minps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5822 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_minps_u128);
5823}
5824
5825
5826/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
5827FNIEMOP_DEF(iemOp_minpd_Vpd_Wpd)
5828{
5829 IEMOP_MNEMONIC2(RM, MINPD, minpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5830 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_minpd_u128);
5831}
5832
5833
5834/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
5835FNIEMOP_DEF(iemOp_minss_Vss_Wss)
5836{
5837 IEMOP_MNEMONIC2(RM, MINSS, minss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5838 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_minss_u128_r32);
5839}
5840
5841
5842/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
5843FNIEMOP_DEF(iemOp_minsd_Vsd_Wsd)
5844{
5845 IEMOP_MNEMONIC2(RM, MINSD, minsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5846 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_minsd_u128_r64);
5847}
5848
5849
5850/** Opcode 0x0f 0x5e - divps Vps, Wps */
5851FNIEMOP_DEF(iemOp_divps_Vps_Wps)
5852{
5853 IEMOP_MNEMONIC2(RM, DIVPS, divps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5854 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_divps_u128);
5855}
5856
5857
5858/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
5859FNIEMOP_DEF(iemOp_divpd_Vpd_Wpd)
5860{
5861 IEMOP_MNEMONIC2(RM, DIVPD, divpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5862 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_divpd_u128);
5863}
5864
5865
5866/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
5867FNIEMOP_DEF(iemOp_divss_Vss_Wss)
5868{
5869 IEMOP_MNEMONIC2(RM, DIVSS, divss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5870 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_divss_u128_r32);
5871}
5872
5873
5874/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
5875FNIEMOP_DEF(iemOp_divsd_Vsd_Wsd)
5876{
5877 IEMOP_MNEMONIC2(RM, DIVSD, divsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5878 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_divsd_u128_r64);
5879}
5880
5881
5882/** Opcode 0x0f 0x5f - maxps Vps, Wps */
5883FNIEMOP_DEF(iemOp_maxps_Vps_Wps)
5884{
5885 IEMOP_MNEMONIC2(RM, MAXPS, maxps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5886 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_maxps_u128);
5887}
5888
5889
5890/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
5891FNIEMOP_DEF(iemOp_maxpd_Vpd_Wpd)
5892{
5893 IEMOP_MNEMONIC2(RM, MAXPD, maxpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5894 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_maxpd_u128);
5895}
5896
5897
5898/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
5899FNIEMOP_DEF(iemOp_maxss_Vss_Wss)
5900{
5901 IEMOP_MNEMONIC2(RM, MAXSS, maxss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5902 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_maxss_u128_r32);
5903}
5904
5905
5906/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
5907FNIEMOP_DEF(iemOp_maxsd_Vsd_Wsd)
5908{
5909 IEMOP_MNEMONIC2(RM, MAXSD, maxsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5910 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_maxsd_u128_r64);
5911}
5912
5913
5914/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
5915FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
5916{
5917 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5918 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
5919}
5920
5921
5922/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
5923FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
5924{
5925 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5926 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklbw_u128);
5927}
5928
5929
5930/* Opcode 0xf3 0x0f 0x60 - invalid */
5931
5932
5933/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
5934FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
5935{
5936 /** @todo AMD marks the MMX version as 3DNow!; Intel says MMX CPUID req. */
5937 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5938 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklwd_u64);
5939}
5940
5941
5942/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
5943FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
5944{
5945 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5946 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklwd_u128);
5947}
5948
5949
5950/* Opcode 0xf3 0x0f 0x61 - invalid */
5951
5952
5953/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
5954FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
5955{
5956 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5957 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpckldq_u64);
5958}
5959
5960
5961/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
5962FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
5963{
5964 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5965 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpckldq_u128);
5966}
5967
5968
5969/* Opcode 0xf3 0x0f 0x62 - invalid */
5970
5971
5972
5973/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
5974FNIEMOP_DEF(iemOp_packsswb_Pq_Qq)
5975{
5976 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5977 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packsswb_u64);
5978}
5979
5980
5981/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
5982FNIEMOP_DEF(iemOp_packsswb_Vx_Wx)
5983{
5984 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5985 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packsswb_u128);
5986}
5987
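/*
 * Illustrative sketch, not built: the signed word-to-byte saturation both
 * packsswb workers apply to each source lane.  iemSketchSatI16ToI8 is a
 * hypothetical helper, not part of IEM.
 */
#if 0
static int8_t iemSketchSatI16ToI8(int16_t i16Lane)
{
    if (i16Lane < INT8_MIN) return INT8_MIN;    /* clamp to -128 */
    if (i16Lane > INT8_MAX) return INT8_MAX;    /* clamp to +127 */
    return (int8_t)i16Lane;
}
#endif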
5988
5989/* Opcode 0xf3 0x0f 0x63 - invalid */
5990
5991
5992/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
5993FNIEMOP_DEF(iemOp_pcmpgtb_Pq_Qq)
5994{
5995 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5996 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtb_u64);
5997}
5998
5999
6000/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
6001FNIEMOP_DEF(iemOp_pcmpgtb_Vx_Wx)
6002{
6003 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6004 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtb_u128);
6005}
6006
6007
6008/* Opcode 0xf3 0x0f 0x64 - invalid */
6009
6010
6011/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
6012FNIEMOP_DEF(iemOp_pcmpgtw_Pq_Qq)
6013{
6014 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6015 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtw_u64);
6016}
6017
6018
6019/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
6020FNIEMOP_DEF(iemOp_pcmpgtw_Vx_Wx)
6021{
6022 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6023 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtw_u128);
6024}
6025
6026
6027/* Opcode 0xf3 0x0f 0x65 - invalid */
6028
6029
6030/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
6031FNIEMOP_DEF(iemOp_pcmpgtd_Pq_Qq)
6032{
6033 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6034 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtd_u64);
6035}
6036
6037
6038/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
6039FNIEMOP_DEF(iemOp_pcmpgtd_Vx_Wx)
6040{
6041 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6042 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtd_u128);
6043}
6044
6045
6046/* Opcode 0xf3 0x0f 0x66 - invalid */
6047
6048
6049/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
6050FNIEMOP_DEF(iemOp_packuswb_Pq_Qq)
6051{
6052 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6053 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packuswb_u64);
6054}
6055
6056
6057/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
6058FNIEMOP_DEF(iemOp_packuswb_Vx_Wx)
6059{
6060 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6061 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packuswb_u128);
6062}
6063
6064
6065/* Opcode 0xf3 0x0f 0x67 - invalid */
6066
6067
6068/** Opcode 0x0f 0x68 - punpckhbw Pq, Qq
6069 * @note Intel and AMD both use Qd for the second parameter; however, they
6070 * both list it as an mmX/mem64 operand and Intel describes it as being
6071 * loaded as a qword, so it should be Qq, shouldn't it? */
6072FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq)
6073{
6074 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6075 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
6076}
6077
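/*
 * Illustrative sketch, not built: the high-half byte interleave punpckhbw
 * performs on its two 64-bit operands (destination bytes come from the
 * first array, source bytes from the second; buffers must not overlap).
 * iemSketchPunpckhbw is a hypothetical helper, not part of IEM.
 */
#if 0
static void iemSketchPunpckhbw(uint8_t abDst[8], const uint8_t abSrc1[8], const uint8_t abSrc2[8])
{
    for (unsigned i = 0; i < 4; i++)
    {
        abDst[i * 2]     = abSrc1[4 + i];   /* high half of the destination ... */
        abDst[i * 2 + 1] = abSrc2[4 + i];   /* ... interleaved with the source's */
    }
}
#endif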
6078
6079/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
6080FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
6081{
6082 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6083 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhbw_u128);
6084}
6085
6086
6087/* Opcode 0xf3 0x0f 0x68 - invalid */
6088
6089
6090/** Opcode 0x0f 0x69 - punpckhwd Pq, Qq
6091 * @note Intel and AMD both use Qd for the second parameter; however, they
6092 * both list it as an mmX/mem64 operand and Intel describes it as being
6093 * loaded as a qword, so it should be Qq, shouldn't it? */
6094FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qq)
6095{
6096 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6097 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhwd_u64);
6098}
6099
6100
6101/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
6102FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
6103{
6104 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6105 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhwd_u128);
6107}
6108
6109
6110/* Opcode 0xf3 0x0f 0x69 - invalid */
6111
6112
6113/** Opcode 0x0f 0x6a - punpckhdq Pq, Qq
6114 * @note Intel and AMD both use Qd for the second parameter; however, they
6115 * both list it as an mmX/mem64 operand and Intel describes it as being
6116 * loaded as a qword, so it should be Qq, shouldn't it? */
6117FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qq)
6118{
6119 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6120 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhdq_u64);
6121}
6122
6123
6124/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
6125FNIEMOP_DEF(iemOp_punpckhdq_Vx_Wx)
6126{
6127 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6128 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhdq_u128);
6129}
6130
6131
6132/* Opcode 0xf3 0x0f 0x6a - invalid */
6133
6134
6135/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
6136FNIEMOP_DEF(iemOp_packssdw_Pq_Qd)
6137{
6138 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6139 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packssdw_u64);
6140}
6141
6142
6143/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
6144FNIEMOP_DEF(iemOp_packssdw_Vx_Wx)
6145{
6146 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6147 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packssdw_u128);
6148}
6149
6150
6151/* Opcode 0xf3 0x0f 0x6b - invalid */
6152
6153
6154/* Opcode 0x0f 0x6c - invalid */
6155
6156
6157/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
6158FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
6159{
6160 IEMOP_MNEMONIC2(RM, PUNPCKLQDQ, punpcklqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6161 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklqdq_u128);
6162}
6163
6164
6165/* Opcode 0xf3 0x0f 0x6c - invalid */
6166/* Opcode 0xf2 0x0f 0x6c - invalid */
6167
6168
6169/* Opcode 0x0f 0x6d - invalid */
6170
6171
6172/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
6173FNIEMOP_DEF(iemOp_punpckhqdq_Vx_Wx)
6174{
6175 IEMOP_MNEMONIC2(RM, PUNPCKHQDQ, punpckhqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6176 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhqdq_u128);
6177}
6178
6179
6180/* Opcode 0xf3 0x0f 0x6d - invalid */
6181
6182
6183FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
6184{
6185 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6186 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6187 {
6188 /**
6189 * @opcode 0x6e
6190 * @opcodesub rex.w=1
6191 * @oppfx none
6192 * @opcpuid mmx
6193 * @opgroup og_mmx_datamove
6194 * @opxcpttype 5
6195 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
6196 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
6197 */
6198 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
6199 if (IEM_IS_MODRM_REG_MODE(bRm))
6200 {
6201 /* MMX, greg64 */
6202 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
6203 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6204 IEM_MC_LOCAL(uint64_t, u64Tmp);
6205
6206 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6207 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6208 IEM_MC_FPU_TO_MMX_MODE();
6209
6210 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6211 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6212
6213 IEM_MC_ADVANCE_RIP_AND_FINISH();
6214 IEM_MC_END();
6215 }
6216 else
6217 {
6218 /* MMX, [mem64] */
6219 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
6220 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6221 IEM_MC_LOCAL(uint64_t, u64Tmp);
6222
6223 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6224 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6225 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6226 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6227 IEM_MC_FPU_TO_MMX_MODE();
6228
6229 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6230 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6231
6232 IEM_MC_ADVANCE_RIP_AND_FINISH();
6233 IEM_MC_END();
6234 }
6235 }
6236 else
6237 {
6238 /**
6239 * @opdone
6240 * @opcode 0x6e
6241 * @opcodesub rex.w=0
6242 * @oppfx none
6243 * @opcpuid mmx
6244 * @opgroup og_mmx_datamove
6245 * @opxcpttype 5
6246 * @opfunction iemOp_movd_q_Pd_Ey
6247 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6248 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6249 */
6250 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
6251 if (IEM_IS_MODRM_REG_MODE(bRm))
6252 {
6253 /* MMX, greg32 */
6254 IEM_MC_BEGIN(0, 1, 0, 0);
6255 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6256 IEM_MC_LOCAL(uint32_t, u32Tmp);
6257
6258 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6259 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6260 IEM_MC_FPU_TO_MMX_MODE();
6261
6262 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6263 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
6264
6265 IEM_MC_ADVANCE_RIP_AND_FINISH();
6266 IEM_MC_END();
6267 }
6268 else
6269 {
6270 /* MMX, [mem32] */
6271 IEM_MC_BEGIN(0, 2, 0, 0);
6272 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6273 IEM_MC_LOCAL(uint32_t, u32Tmp);
6274
6275 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6276 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6277 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6278 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6279 IEM_MC_FPU_TO_MMX_MODE();
6280
6281 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6282 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
6283
6284 IEM_MC_ADVANCE_RIP_AND_FINISH();
6285 IEM_MC_END();
6286 }
6287 }
6288}
6289
6290FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
6291{
6292 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6293 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6294 {
6295 /**
6296 * @opcode 0x6e
6297 * @opcodesub rex.w=1
6298 * @oppfx 0x66
6299 * @opcpuid sse2
6300 * @opgroup og_sse2_simdint_datamove
6301 * @opxcpttype 5
6302 * @optest 64-bit / op1=1 op2=2 -> op1=2
6303 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
6304 */
6305 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
6306 if (IEM_IS_MODRM_REG_MODE(bRm))
6307 {
6308 /* XMM, greg64 */
6309 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
6310 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6311 IEM_MC_LOCAL(uint64_t, u64Tmp);
6312
6313 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6314 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6315
6316 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6317 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
6318
6319 IEM_MC_ADVANCE_RIP_AND_FINISH();
6320 IEM_MC_END();
6321 }
6322 else
6323 {
6324 /* XMM, [mem64] */
6325 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
6326 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6327 IEM_MC_LOCAL(uint64_t, u64Tmp);
6328
6329 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6330 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6331 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6332 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6333
6334 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6335 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
6336
6337 IEM_MC_ADVANCE_RIP_AND_FINISH();
6338 IEM_MC_END();
6339 }
6340 }
6341 else
6342 {
6343 /**
6344 * @opdone
6345 * @opcode 0x6e
6346 * @opcodesub rex.w=0
6347 * @oppfx 0x66
6348 * @opcpuid sse2
6349 * @opgroup og_sse2_simdint_datamove
6350 * @opxcpttype 5
6351 * @opfunction iemOp_movd_q_Vy_Ey
6352 * @optest op1=1 op2=2 -> op1=2
6353 * @optest op1=0 op2=-42 -> op1=-42
6354 */
6355 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
6356 if (IEM_IS_MODRM_REG_MODE(bRm))
6357 {
6358 /* XMM, greg32 */
6359 IEM_MC_BEGIN(0, 1, 0, 0);
6360 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6361 IEM_MC_LOCAL(uint32_t, u32Tmp);
6362
6363 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6364 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6365
6366 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6367 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
6368
6369 IEM_MC_ADVANCE_RIP_AND_FINISH();
6370 IEM_MC_END();
6371 }
6372 else
6373 {
6374 /* XMM, [mem32] */
6375 IEM_MC_BEGIN(0, 2, 0, 0);
6376 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6377 IEM_MC_LOCAL(uint32_t, u32Tmp);
6378
6379 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6380 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6381 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6382 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6383
6384 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6385 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
6386
6387 IEM_MC_ADVANCE_RIP_AND_FINISH();
6388 IEM_MC_END();
6389 }
6390 }
6391}
6392
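/*
 * Illustrative sketch, not built: the zero extension the
 * IEM_MC_STORE_XREG_U32_ZX_U128 / IEM_MC_STORE_XREG_U64_ZX_U128 operations
 * above perform -- movd/movq to an XMM register always clears the bytes it
 * does not write.  iemSketchMovdToXmm is a hypothetical helper, not part
 * of IEM.
 */
#if 0
static void iemSketchMovdToXmm(PRTUINT128U puDst, uint32_t u32Src)
{
    puDst->s.Lo = u32Src;   /* low dword written, bits 32..63 cleared */
    puDst->s.Hi = 0;        /* high qword cleared */
}
#endif
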
6393/* Opcode 0xf3 0x0f 0x6e - invalid */
6394
6395
6396/**
6397 * @opcode 0x6f
6398 * @oppfx none
6399 * @opcpuid mmx
6400 * @opgroup og_mmx_datamove
6401 * @opxcpttype 5
6402 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6403 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6404 */
6405FNIEMOP_DEF(iemOp_movq_Pq_Qq)
6406{
6407 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6408 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6409 if (IEM_IS_MODRM_REG_MODE(bRm))
6410 {
6411 /*
6412 * Register, register.
6413 */
6414 IEM_MC_BEGIN(0, 1, 0, 0);
6415 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6416 IEM_MC_LOCAL(uint64_t, u64Tmp);
6417
6418 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6419 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
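        /* Entering MMX mode resets the x87 TOP to zero and marks the whole
           tag word valid -- that is the ftw=0xff in the @optest hints above. */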
6420 IEM_MC_FPU_TO_MMX_MODE();
6421
6422 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_RM_8(bRm));
6423 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6424
6425 IEM_MC_ADVANCE_RIP_AND_FINISH();
6426 IEM_MC_END();
6427 }
6428 else
6429 {
6430 /*
6431 * Register, memory.
6432 */
6433 IEM_MC_BEGIN(0, 2, 0, 0);
6434 IEM_MC_LOCAL(uint64_t, u64Tmp);
6435 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6436
6437 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6438 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6439 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6440 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6441 IEM_MC_FPU_TO_MMX_MODE();
6442
6443 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6444 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6445
6446 IEM_MC_ADVANCE_RIP_AND_FINISH();
6447 IEM_MC_END();
6448 }
6449}
6450
6451/**
6452 * @opcode 0x6f
6453 * @oppfx 0x66
6454 * @opcpuid sse2
6455 * @opgroup og_sse2_simdint_datamove
6456 * @opxcpttype 1
6457 * @optest op1=1 op2=2 -> op1=2
6458 * @optest op1=0 op2=-42 -> op1=-42
6459 */
6460FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
6461{
6462 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6463 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6464 if (IEM_IS_MODRM_REG_MODE(bRm))
6465 {
6466 /*
6467 * Register, register.
6468 */
6469 IEM_MC_BEGIN(0, 0, 0, 0);
6470 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6471
6472 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6473 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6474
6475 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6476 IEM_GET_MODRM_RM(pVCpu, bRm));
6477 IEM_MC_ADVANCE_RIP_AND_FINISH();
6478 IEM_MC_END();
6479 }
6480 else
6481 {
6482 /*
6483 * Register, memory.
6484 */
6485 IEM_MC_BEGIN(0, 2, 0, 0);
6486 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6487 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6488
6489 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6490 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6491 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6492 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6493
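        /* The aligned fetch raises #GP(0) if the effective address isn't
           16-byte aligned; movdqu below uses the unaligned
           IEM_MC_FETCH_MEM_U128 variant instead. */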
6494 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6495 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6496
6497 IEM_MC_ADVANCE_RIP_AND_FINISH();
6498 IEM_MC_END();
6499 }
6500}
6501
6502/**
6503 * @opcode 0x6f
6504 * @oppfx 0xf3
6505 * @opcpuid sse2
6506 * @opgroup og_sse2_simdint_datamove
6507 * @opxcpttype 4UA
6508 * @optest op1=1 op2=2 -> op1=2
6509 * @optest op1=0 op2=-42 -> op1=-42
6510 */
6511FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
6512{
6513 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6514 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6515 if (IEM_IS_MODRM_REG_MODE(bRm))
6516 {
6517 /*
6518 * Register, register.
6519 */
6520 IEM_MC_BEGIN(0, 0, 0, 0);
6521 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6522 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6523 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6524 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6525 IEM_GET_MODRM_RM(pVCpu, bRm));
6526 IEM_MC_ADVANCE_RIP_AND_FINISH();
6527 IEM_MC_END();
6528 }
6529 else
6530 {
6531 /*
6532 * Register, memory.
6533 */
6534 IEM_MC_BEGIN(0, 2, 0, 0);
6535 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6536 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6537
6538 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6539 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6540 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6541 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6542 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6543 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6544
6545 IEM_MC_ADVANCE_RIP_AND_FINISH();
6546 IEM_MC_END();
6547 }
6548}
6549
6550
6551/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
6552FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
6553{
6554 IEMOP_MNEMONIC3(RMI, PSHUFW, pshufw, Pq, Qq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6555 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6556 if (IEM_IS_MODRM_REG_MODE(bRm))
6557 {
6558 /*
6559 * Register, register.
6560 */
6561 IEM_MC_BEGIN(3, 0, 0, 0);
6562 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6563 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
6564 IEM_MC_ARG(uint64_t *, pDst, 0);
6565 IEM_MC_ARG(uint64_t const *, pSrc, 1);
6566 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6567 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6568 IEM_MC_PREPARE_FPU_USAGE();
6569 IEM_MC_FPU_TO_MMX_MODE();
6570
6571 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6572 IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
6573 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
6574 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6575
6576 IEM_MC_ADVANCE_RIP_AND_FINISH();
6577 IEM_MC_END();
6578 }
6579 else
6580 {
6581 /*
6582 * Register, memory.
6583 */
6584 IEM_MC_BEGIN(3, 2, 0, 0);
6585 IEM_MC_ARG(uint64_t *, pDst, 0);
6586 IEM_MC_LOCAL(uint64_t, uSrc);
6587 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
6588 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6589
6590 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
6591 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6592 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6593 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
6594 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6595 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6596
6597 IEM_MC_PREPARE_FPU_USAGE();
6598 IEM_MC_FPU_TO_MMX_MODE();
6599
6600 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6601 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
6602 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6603
6604 IEM_MC_ADVANCE_RIP_AND_FINISH();
6605 IEM_MC_END();
6606 }
6607}
6608
6609
6610/**
6611 * Common worker for SSE2 instructions on the forms:
6612 * pshufd xmm1, xmm2/mem128, imm8
6613 * pshufhw xmm1, xmm2/mem128, imm8
6614 * pshuflw xmm1, xmm2/mem128, imm8
6615 *
6616 * Proper alignment of the 128-bit operand is enforced.
6617 * Exceptions type 4. SSE2 cpuid checks.
6618 */
6619FNIEMOP_DEF_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnWorker)
6620{
6621 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6622 if (IEM_IS_MODRM_REG_MODE(bRm))
6623 {
6624 /*
6625 * Register, register.
6626 */
6627 IEM_MC_BEGIN(3, 0, 0, 0);
6628 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6629 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6630 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6631 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
6632 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6633 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6634 IEM_MC_PREPARE_SSE_USAGE();
6635 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6636 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
6637 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);
6638 IEM_MC_ADVANCE_RIP_AND_FINISH();
6639 IEM_MC_END();
6640 }
6641 else
6642 {
6643 /*
6644 * Register, memory.
6645 */
6646 IEM_MC_BEGIN(3, 2, 0, 0);
6647 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6648 IEM_MC_LOCAL(RTUINT128U, uSrc);
6649 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
6650 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6651
6652 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
6653 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6654 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6656 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6657
6658 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6659 IEM_MC_PREPARE_SSE_USAGE();
6660 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6661 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);
6662
6663 IEM_MC_ADVANCE_RIP_AND_FINISH();
6664 IEM_MC_END();
6665 }
6666}
6667
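/*
 * Illustrative sketch, not built: the dword selection pshufd performs --
 * each 2-bit immediate field picks the source dword for the corresponding
 * result slot.  iemSketchPshufd is a hypothetical helper, not part of IEM.
 */
#if 0
static void iemSketchPshufd(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t bImm)
{
    RTUINT128U const uSrc = *puSrc; /* copy first so puDst == puSrc works */
    for (unsigned i = 0; i < 4; i++)
        puDst->au32[i] = uSrc.au32[(bImm >> (i * 2)) & 3];
}
#endif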
6668
6669/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
6670FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
6671{
6672 IEMOP_MNEMONIC3(RMI, PSHUFD, pshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6673 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128);
6674}
6675
6676
6677/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
6678FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
6679{
6680 IEMOP_MNEMONIC3(RMI, PSHUFHW, pshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6681 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128);
6682}
6683
6684
6685/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
6686FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
6687{
6688 IEMOP_MNEMONIC3(RMI, PSHUFLW, pshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6689 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128);
6690}
6691
6692
6693/**
6694 * Common worker for MMX instructions of the form:
6695 * psrlw mm, imm8
6696 * psraw mm, imm8
6697 * psllw mm, imm8
6698 * psrld mm, imm8
6699 * psrad mm, imm8
6700 * pslld mm, imm8
6701 * psrlq mm, imm8
6702 * psllq mm, imm8
6703 *
6704 */
6705FNIEMOP_DEF_2(iemOpCommonMmx_Shift_Imm, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHIFTU64, pfnU64)
6706{
6707 if (IEM_IS_MODRM_REG_MODE(bRm))
6708 {
6709 /*
6710 * Register, immediate.
6711 */
6712 IEM_MC_BEGIN(2, 0, 0, 0);
6713 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6714 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6715 IEM_MC_ARG(uint64_t *, pDst, 0);
6716 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6717 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6718 IEM_MC_PREPARE_FPU_USAGE();
6719 IEM_MC_FPU_TO_MMX_MODE();
6720
6721 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_RM_8(bRm));
6722 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, bShiftArg);
6723 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6724
6725 IEM_MC_ADVANCE_RIP_AND_FINISH();
6726 IEM_MC_END();
6727 }
6728 else
6729 {
6730 /*
6731 * Register, memory not supported.
6732 */
6733 /// @todo Caller already enforced register mode?!
6734 AssertFailedReturn(VINF_SUCCESS);
6735 }
6736}
6737
6738
6739/**
6740 * Common worker for SSE2 instructions of the form:
6741 * psrlw xmm, imm8
6742 * psraw xmm, imm8
6743 * psllw xmm, imm8
6744 * psrld xmm, imm8
6745 * psrad xmm, imm8
6746 * pslld xmm, imm8
6747 * psrlq xmm, imm8
6748 * psllq xmm, imm8
6749 *
6750 */
6751FNIEMOP_DEF_2(iemOpCommonSse2_Shift_Imm, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHIFTU128, pfnU128)
6752{
6753 if (IEM_IS_MODRM_REG_MODE(bRm))
6754 {
6755 /*
6756 * Register, immediate.
6757 */
6758 IEM_MC_BEGIN(2, 0, 0, 0);
6759 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6760 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6761 IEM_MC_ARG(PRTUINT128U, pDst, 0);
6762 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6763 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6764 IEM_MC_PREPARE_SSE_USAGE();
6765 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_RM(pVCpu, bRm));
6766 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, bShiftArg);
6767 IEM_MC_ADVANCE_RIP_AND_FINISH();
6768 IEM_MC_END();
6769 }
6770 else
6771 {
6772 /*
6773 * Register, memory not supported.
6774 */
6775 /// @todo Caller already enforced register mode?!
6776 AssertFailedReturn(VINF_SUCCESS);
6777 }
6778}
6779
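/*
 * Illustrative sketch, not built: the per-lane 16-bit behaviour of the
 * word shifts the two workers above dispatch to.  Counts above 15 zero
 * the lane for the logical shifts and act like 15 for the arithmetic
 * right shift.  The iemSketch* helpers are hypothetical, not part of IEM.
 */
#if 0
static uint16_t iemSketchPsrlwLane(uint16_t uLane, uint8_t cShift)
{
    return cShift <= 15 ? (uint16_t)(uLane >> cShift) : 0;
}

static uint16_t iemSketchPsrawLane(uint16_t uLane, uint8_t cShift)
{
    if (cShift > 15)
        cShift = 15;                    /* large counts replicate the sign bit */
    return (uint16_t)((int16_t)uLane >> cShift);
}
#endif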
6780
6781/** Opcode 0x0f 0x71 11/2 - psrlw Nq, Ib */
6782FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Nq_Ib)
6783{
6784// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6785 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlw_imm_u64);
6786}
6787
6788
6789/** Opcode 0x66 0x0f 0x71 11/2. */
6790FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Ux_Ib)
6791{
6792// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6793 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlw_imm_u128);
6794}
6795
6796
6797/** Opcode 0x0f 0x71 11/4. */
6798FNIEMOPRM_DEF(iemOp_Grp12_psraw_Nq_Ib)
6799{
6800// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6801 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psraw_imm_u64);
6802}
6803
6804
6805/** Opcode 0x66 0x0f 0x71 11/4. */
6806FNIEMOPRM_DEF(iemOp_Grp12_psraw_Ux_Ib)
6807{
6808// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6809 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psraw_imm_u128);
6810}
6811
6812
6813/** Opcode 0x0f 0x71 11/6. */
6814FNIEMOPRM_DEF(iemOp_Grp12_psllw_Nq_Ib)
6815{
6816// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6817 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllw_imm_u64);
6818}
6819
6820
6821/** Opcode 0x66 0x0f 0x71 11/6. */
6822FNIEMOPRM_DEF(iemOp_Grp12_psllw_Ux_Ib)
6823{
6824// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6825 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllw_imm_u128);
6826}
6827
6828
6829/**
6830 * Group 12 jump table for register variant.
6831 */
6832IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
6833{
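 /* Each /r row has four columns indexed by pVCpu->iem.s.idxPrefix:
    no prefix, 0x66, 0xf3 and 0xf2 (the same layout as the 0x0f 0x38/0x3a
    escape tables dispatched above). */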
6834 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6835 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6836 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6837 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6838 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6839 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6840 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6841 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6842};
6843AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
6844
6845
6846/** Opcode 0x0f 0x71. */
6847FNIEMOP_DEF(iemOp_Grp12)
6848{
6849 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6850 if (IEM_IS_MODRM_REG_MODE(bRm))
6851 /* register, immediate */
6852 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6853 + pVCpu->iem.s.idxPrefix], bRm);
6854 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6855}
6856
6857
6858/** Opcode 0x0f 0x72 11/2. */
6859FNIEMOPRM_DEF(iemOp_Grp13_psrld_Nq_Ib)
6860{
6861// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6862 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrld_imm_u64);
6863}
6864
6865
6866/** Opcode 0x66 0x0f 0x72 11/2. */
6867FNIEMOPRM_DEF(iemOp_Grp13_psrld_Ux_Ib)
6868{
6869// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6870 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrld_imm_u128);
6871}
6872
6873
6874/** Opcode 0x0f 0x72 11/4. */
6875FNIEMOPRM_DEF(iemOp_Grp13_psrad_Nq_Ib)
6876{
6877// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6878 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrad_imm_u64);
6879}
6880
6881
6882/** Opcode 0x66 0x0f 0x72 11/4. */
6883FNIEMOPRM_DEF(iemOp_Grp13_psrad_Ux_Ib)
6884{
6885// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6886 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrad_imm_u128);
6887}
6888
6889
6890/** Opcode 0x0f 0x72 11/6. */
6891FNIEMOPRM_DEF(iemOp_Grp13_pslld_Nq_Ib)
6892{
6893// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6894 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_pslld_imm_u64);
6895}
6896
6897/** Opcode 0x66 0x0f 0x72 11/6. */
6898FNIEMOPRM_DEF(iemOp_Grp13_pslld_Ux_Ib)
6899{
6900// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6901 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslld_imm_u128);
6902}
6903
6904
6905/**
6906 * Group 13 jump table for register variant.
6907 */
6908IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
6909{
6910 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6911 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6912 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6913 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6914 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6915 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6916 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6917 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6918};
6919AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
6920
6921/** Opcode 0x0f 0x72. */
6922FNIEMOP_DEF(iemOp_Grp13)
6923{
6924 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6925 if (IEM_IS_MODRM_REG_MODE(bRm))
6926 /* register, immediate */
6927 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6928 + pVCpu->iem.s.idxPrefix], bRm);
6929 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6930}
6931
6932
6933/** Opcode 0x0f 0x73 11/2. */
6934FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Nq_Ib)
6935{
6936// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6937 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlq_imm_u64);
6938}
6939
6940
6941/** Opcode 0x66 0x0f 0x73 11/2. */
6942FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Ux_Ib)
6943{
6944// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6945 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlq_imm_u128);
6946}
6947
6948
6949/** Opcode 0x66 0x0f 0x73 11/3. */
6950FNIEMOPRM_DEF(iemOp_Grp14_psrldq_Ux_Ib)
6951{
6952// IEMOP_MNEMONIC2(RI, PSRLDQ, psrldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6953 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrldq_imm_u128);
6954}
6955
6956
6957/** Opcode 0x0f 0x73 11/6. */
6958FNIEMOPRM_DEF(iemOp_Grp14_psllq_Nq_Ib)
6959{
6960// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6961 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllq_imm_u64);
6962}
6963
6964
6965/** Opcode 0x66 0x0f 0x73 11/6. */
6966FNIEMOPRM_DEF(iemOp_Grp14_psllq_Ux_Ib)
6967{
6968// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6969 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllq_imm_u128);
6970}
6971
6972
6973/** Opcode 0x66 0x0f 0x73 11/7. */
6974FNIEMOPRM_DEF(iemOp_Grp14_pslldq_Ux_Ib)
6975{
6976// IEMOP_MNEMONIC2(RI, PSLLDQ, pslldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6977 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslldq_imm_u128);
6978}
6979
6980/**
6981 * Group 14 jump table for register variant.
6982 */
6983IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
6984{
6985 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6986 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6987 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6988 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6989 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6990 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6991 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6992 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6993};
6994AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
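
/* Note: Unlike groups 12 and 13, rows /3 (psrldq) and /7 (pslldq) above only
   have the 0x66 column populated - the 128-bit byte shifts are SSE2-only and
   have no MMX counterpart. */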
6995
6996
6997/** Opcode 0x0f 0x73. */
6998FNIEMOP_DEF(iemOp_Grp14)
6999{
7000 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7001 if (IEM_IS_MODRM_REG_MODE(bRm))
7002 /* register, register */
7003 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
7004 + pVCpu->iem.s.idxPrefix], bRm);
7005 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
7006}
7007
7008
7009/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
7010FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
7011{
7012 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7013 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqb_u64);
7014}
7015
7016
7017/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
7018FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
7019{
7020 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7021 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqb_u128);
7022}
7023
7024
7025/* Opcode 0xf3 0x0f 0x74 - invalid */
7026/* Opcode 0xf2 0x0f 0x74 - invalid */
7027
7028
7029/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
7030FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
7031{
7032 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7033 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqw_u64);
7034}
7035
7036
7037/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
7038FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
7039{
7040 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7041 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqw_u128);
7042}
7043
7044
7045/* Opcode 0xf3 0x0f 0x75 - invalid */
7046/* Opcode 0xf2 0x0f 0x75 - invalid */
7047
7048
7049/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
7050FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
7051{
7052 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7053 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqd_u64);
7054}
7055
7056
7057/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
7058FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
7059{
7060 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7061 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqd_u128);
7062}
7063
7064
7065/* Opcode 0xf3 0x0f 0x76 - invalid */
7066/* Opcode 0xf2 0x0f 0x76 - invalid */
7067
7068
7069/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
7070FNIEMOP_DEF(iemOp_emms)
7071{
7072 IEMOP_MNEMONIC(emms, "emms");
7073 IEM_MC_BEGIN(0, 0, 0, 0);
7074 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7075 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7076 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7077 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7078 IEM_MC_FPU_FROM_MMX_MODE();
7079 IEM_MC_ADVANCE_RIP_AND_FINISH();
7080 IEM_MC_END();
7081}
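
/* Note: EMMS leaves MMX mode by marking all eight x87 registers empty again
   (IEM_MC_FPU_FROM_MMX_MODE above), so regular x87 code can safely follow a
   run of MMX instructions. */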
7082
7083/* Opcode 0x66 0x0f 0x77 - invalid */
7084/* Opcode 0xf3 0x0f 0x77 - invalid */
7085/* Opcode 0xf2 0x0f 0x77 - invalid */
7086
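/* Note: VMREAD/VMWRITE below go through cImpl workers since they can cause
   VM-exits (IEM_CIMPL_F_VMEXIT); the IEM_MC_HINT_FLUSH_GUEST_SHADOW_GREG in
   the register variants presumably tells the native recompiler to discard any
   shadow copy of the destination register, as the worker writes it directly. */
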
7087/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
7088#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
7089FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
7090{
7091 IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
7092 IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVDiag_Vmread);
7093 IEMOP_HLP_VMX_INSTR("vmread", kVmxVDiag_Vmread);
7094 IEMMODE const enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : IEMMODE_32BIT;
7095
7096 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7097 if (IEM_IS_MODRM_REG_MODE(bRm))
7098 {
7099 /*
7100 * Register, register.
7101 */
7102 if (enmEffOpSize == IEMMODE_64BIT)
7103 {
7104 IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
7105 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7106 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7107 IEM_MC_ARG(uint64_t, u64Enc, 1);
7108 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7109 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7110 IEM_MC_HINT_FLUSH_GUEST_SHADOW_GREG(IEM_GET_MODRM_RM(pVCpu, bRm));
7111 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmread_reg64, pu64Dst, u64Enc);
7112 IEM_MC_END();
7113 }
7114 else
7115 {
7116 IEM_MC_BEGIN(2, 0, 0, 0);
7117 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7118 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7119 IEM_MC_ARG(uint32_t, u32Enc, 1);
7120 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7121 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7122 IEM_MC_HINT_FLUSH_GUEST_SHADOW_GREG(IEM_GET_MODRM_RM(pVCpu, bRm));
7123 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmread_reg32, pu64Dst, u32Enc);
7124 IEM_MC_END();
7125 }
7126 }
7127 else
7128 {
7129 /*
7130 * Memory, register.
7131 */
7132 if (enmEffOpSize == IEMMODE_64BIT)
7133 {
7134 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
7135 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7136 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7137 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7138 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
7139 IEM_MC_ARG(uint64_t, u64Enc, 2);
7140 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7141 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
7142 iemCImpl_vmread_mem_reg64, iEffSeg, GCPtrVal, u64Enc);
7143 IEM_MC_END();
7144 }
7145 else
7146 {
7147 IEM_MC_BEGIN(3, 0, 0, 0);
7148 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7149 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7150 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7151 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
7152 IEM_MC_ARG(uint32_t, u32Enc, 2);
7153 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7154 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
7155 iemCImpl_vmread_mem_reg32, iEffSeg, GCPtrVal, u32Enc);
7156 IEM_MC_END();
7157 }
7158 }
7159}
7160#else
7161FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
7162#endif
7163
7164/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
7165FNIEMOP_STUB(iemOp_AmdGrp17);
7166/* Opcode 0xf3 0x0f 0x78 - invalid */
7167/* Opcode 0xf2 0x0f 0x78 - invalid */
7168
7169/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
7170#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
7171FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
7172{
7173 IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
7174 IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVDiag_Vmwrite);
7175 IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVDiag_Vmwrite);
7176 IEMMODE const enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : IEMMODE_32BIT;
7177
7178 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7179 if (IEM_IS_MODRM_REG_MODE(bRm))
7180 {
7181 /*
7182 * Register, register.
7183 */
7184 if (enmEffOpSize == IEMMODE_64BIT)
7185 {
7186 IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
7187 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7188 IEM_MC_ARG(uint64_t, u64Val, 0);
7189 IEM_MC_ARG(uint64_t, u64Enc, 1);
7190 IEM_MC_FETCH_GREG_U64(u64Val, IEM_GET_MODRM_RM(pVCpu, bRm));
7191 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7192 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmwrite_reg, u64Val, u64Enc);
7193 IEM_MC_END();
7194 }
7195 else
7196 {
7197 IEM_MC_BEGIN(2, 0, 0, 0);
7198 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7199 IEM_MC_ARG(uint32_t, u32Val, 0);
7200 IEM_MC_ARG(uint32_t, u32Enc, 1);
7201 IEM_MC_FETCH_GREG_U32(u32Val, IEM_GET_MODRM_RM(pVCpu, bRm));
7202 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7203 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmwrite_reg, u32Val, u32Enc);
7204 IEM_MC_END();
7205 }
7206 }
7207 else
7208 {
7209 /*
7210 * Register, memory.
7211 */
7212 if (enmEffOpSize == IEMMODE_64BIT)
7213 {
7214 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
7215 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7216 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7217 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7218 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
7219 IEM_MC_ARG(uint64_t, u64Enc, 2);
7220 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7221 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
7222 iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u64Enc);
7223 IEM_MC_END();
7224 }
7225 else
7226 {
7227 IEM_MC_BEGIN(3, 0, 0, 0);
7228 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7229 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7230 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7231 IEM_MC_ARG(uint32_t, u32Enc, 2);
7232 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
7233 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7234 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
7235 iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u32Enc);
7236 IEM_MC_END();
7237 }
7238 }
7239}
7240#else
7241FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
7242#endif
7243/* Opcode 0x66 0x0f 0x79 - invalid */
7244/* Opcode 0xf3 0x0f 0x79 - invalid */
7245/* Opcode 0xf2 0x0f 0x79 - invalid */
7246
7247/* Opcode 0x0f 0x7a - invalid */
7248/* Opcode 0x66 0x0f 0x7a - invalid */
7249/* Opcode 0xf3 0x0f 0x7a - invalid */
7250/* Opcode 0xf2 0x0f 0x7a - invalid */
7251
7252/* Opcode 0x0f 0x7b - invalid */
7253/* Opcode 0x66 0x0f 0x7b - invalid */
7254/* Opcode 0xf3 0x0f 0x7b - invalid */
7255/* Opcode 0xf2 0x0f 0x7b - invalid */
7256
7257/* Opcode 0x0f 0x7c - invalid */
7258
7259
7260/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
7261FNIEMOP_DEF(iemOp_haddpd_Vpd_Wpd)
7262{
7263 IEMOP_MNEMONIC2(RM, HADDPD, haddpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
7264 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddpd_u128);
7265}
7266
7267
7268/* Opcode 0xf3 0x0f 0x7c - invalid */
7269
7270
7271/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
7272FNIEMOP_DEF(iemOp_haddps_Vps_Wps)
7273{
7274 IEMOP_MNEMONIC2(RM, HADDPS, haddps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
7275 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddps_u128);
7276}
7277
7278
7279/* Opcode 0x0f 0x7d - invalid */
7280
7281
7282/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
7283FNIEMOP_DEF(iemOp_hsubpd_Vpd_Wpd)
7284{
7285 IEMOP_MNEMONIC2(RM, HSUBPD, hsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
7286 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubpd_u128);
7287}
7288
7289
7290/* Opcode 0xf3 0x0f 0x7d - invalid */
7291
7292
7293/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
7294FNIEMOP_DEF(iemOp_hsubps_Vps_Wps)
7295{
7296 IEMOP_MNEMONIC2(RM, HSUBPS, hsubps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
7297 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubps_u128);
7298}
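
/* Note: The four SSE3 horizontal ops above all share
   iemOpCommonSse3Fp_FullFull_To_Full; each adds/subtracts adjacent element
   pairs within each source operand rather than corresponding elements of the
   two operands. */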
7299
7300
7301/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
7302FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
7303{
7304 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7305 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7306 {
7307 /**
7308 * @opcode 0x7e
7309 * @opcodesub rex.w=1
7310 * @oppfx none
7311 * @opcpuid mmx
7312 * @opgroup og_mmx_datamove
7313 * @opxcpttype 5
7314 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
7315 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
7316 */
7317 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
7318 if (IEM_IS_MODRM_REG_MODE(bRm))
7319 {
7320 /* greg64, MMX */
7321 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
7322 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7323 IEM_MC_LOCAL(uint64_t, u64Tmp);
7324
7325 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7326 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7327 IEM_MC_FPU_TO_MMX_MODE();
7328
7329 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7330 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
7331
7332 IEM_MC_ADVANCE_RIP_AND_FINISH();
7333 IEM_MC_END();
7334 }
7335 else
7336 {
7337 /* [mem64], MMX */
7338 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
7339 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7340 IEM_MC_LOCAL(uint64_t, u64Tmp);
7341
7342 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7343 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7344 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7345 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7346 IEM_MC_FPU_TO_MMX_MODE();
7347
7348 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7349 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7350
7351 IEM_MC_ADVANCE_RIP_AND_FINISH();
7352 IEM_MC_END();
7353 }
7354 }
7355 else
7356 {
7357 /**
7358 * @opdone
7359 * @opcode 0x7e
7360 * @opcodesub rex.w=0
7361 * @oppfx none
7362 * @opcpuid mmx
7363 * @opgroup og_mmx_datamove
7364 * @opxcpttype 5
7365 * @opfunction iemOp_movd_q_Ey_Pd
7366 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
7367 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
7368 */
7369 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
7370 if (IEM_IS_MODRM_REG_MODE(bRm))
7371 {
7372 /* greg32, MMX */
7373 IEM_MC_BEGIN(0, 1, 0, 0);
7374 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7375 IEM_MC_LOCAL(uint32_t, u32Tmp);
7376
7377 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7378 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7379 IEM_MC_FPU_TO_MMX_MODE();
7380
7381 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
7382 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
7383
7384 IEM_MC_ADVANCE_RIP_AND_FINISH();
7385 IEM_MC_END();
7386 }
7387 else
7388 {
7389 /* [mem32], MMX */
7390 IEM_MC_BEGIN(0, 2, 0, 0);
7391 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7392 IEM_MC_LOCAL(uint32_t, u32Tmp);
7393
7394 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7395 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7396 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7397 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7398 IEM_MC_FPU_TO_MMX_MODE();
7399
7400 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
7401 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
7402
7403 IEM_MC_ADVANCE_RIP_AND_FINISH();
7404 IEM_MC_END();
7405 }
7406 }
7407}
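
/* Note: The ftw=0xff in the @optest lines above reflects that executing any
   MMX instruction - even a plain register read like this one - sets TOP to
   zero and marks all eight x87 registers valid in the abridged tag word. */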
7408
7409
7410FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
7411{
7412 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7413 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7414 {
7415 /**
7416 * @opcode 0x7e
7417 * @opcodesub rex.w=1
7418 * @oppfx 0x66
7419 * @opcpuid sse2
7420 * @opgroup og_sse2_simdint_datamove
7421 * @opxcpttype 5
7422 * @optest 64-bit / op1=1 op2=2 -> op1=2
7423 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
7424 */
7425 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
7426 if (IEM_IS_MODRM_REG_MODE(bRm))
7427 {
7428 /* greg64, XMM */
7429 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
7430 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7431 IEM_MC_LOCAL(uint64_t, u64Tmp);
7432
7433 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7434 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7435
7436 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
7437 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
7438
7439 IEM_MC_ADVANCE_RIP_AND_FINISH();
7440 IEM_MC_END();
7441 }
7442 else
7443 {
7444 /* [mem64], XMM */
7445 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
7446 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7447 IEM_MC_LOCAL(uint64_t, u64Tmp);
7448
7449 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7450 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7451 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7452 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7453
7454 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
7455 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7456
7457 IEM_MC_ADVANCE_RIP_AND_FINISH();
7458 IEM_MC_END();
7459 }
7460 }
7461 else
7462 {
7463 /**
7464 * @opdone
7465 * @opcode 0x7e
7466 * @opcodesub rex.w=0
7467 * @oppfx 0x66
7468 * @opcpuid sse2
7469 * @opgroup og_sse2_simdint_datamove
7470 * @opxcpttype 5
7471 * @opfunction iemOp_movd_q_Ey_Vy
7472 * @optest op1=1 op2=2 -> op1=2
7473 * @optest op1=0 op2=-42 -> op1=-42
7474 */
7475 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
7476 if (IEM_IS_MODRM_REG_MODE(bRm))
7477 {
7478 /* greg32, XMM */
7479 IEM_MC_BEGIN(0, 1, 0, 0);
7480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7481 IEM_MC_LOCAL(uint32_t, u32Tmp);
7482
7483 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7484 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7485
7486 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
7487 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
7488
7489 IEM_MC_ADVANCE_RIP_AND_FINISH();
7490 IEM_MC_END();
7491 }
7492 else
7493 {
7494 /* [mem32], XMM */
7495 IEM_MC_BEGIN(0, 2, 0, 0);
7496 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7497 IEM_MC_LOCAL(uint32_t, u32Tmp);
7498
7499 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7500 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7501 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7502 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7503
7504 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
7505 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
7506
7507 IEM_MC_ADVANCE_RIP_AND_FINISH();
7508 IEM_MC_END();
7509 }
7510 }
7511}
7512
7513/**
7514 * @opcode 0x7e
7515 * @oppfx 0xf3
7516 * @opcpuid sse2
7517 * @opgroup og_sse2_pcksclr_datamove
7518 * @opxcpttype none
7519 * @optest op1=1 op2=2 -> op1=2
7520 * @optest op1=0 op2=-42 -> op1=-42
7521 */
7522FNIEMOP_DEF(iemOp_movq_Vq_Wq)
7523{
7524 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7525 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7526 if (IEM_IS_MODRM_REG_MODE(bRm))
7527 {
7528 /*
7529 * XMM128, XMM64.
7530 */
7531 IEM_MC_BEGIN(0, 2, 0, 0);
7532 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7533 IEM_MC_LOCAL(uint64_t, uSrc);
7534
7535 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7536 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7537
7538 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
7539 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7540
7541 IEM_MC_ADVANCE_RIP_AND_FINISH();
7542 IEM_MC_END();
7543 }
7544 else
7545 {
7546 /*
7547 * XMM128, [mem64].
7548 */
7549 IEM_MC_BEGIN(0, 2, 0, 0);
7550 IEM_MC_LOCAL(uint64_t, uSrc);
7551 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7552
7553 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7554 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7555 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7556 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7557
7558 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7559 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7560
7561 IEM_MC_ADVANCE_RIP_AND_FINISH();
7562 IEM_MC_END();
7563 }
7564}
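
/* Note: Both variants above store via IEM_MC_STORE_XREG_U64_ZX_U128, i.e. the
   f3-prefixed movq zero-extends the low quadword into the full destination
   register rather than preserving its upper half. */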
7565
7566/* Opcode 0xf2 0x0f 0x7e - invalid */
7567
7568
7569/** Opcode 0x0f 0x7f - movq Qq, Pq */
7570FNIEMOP_DEF(iemOp_movq_Qq_Pq)
7571{
7572 IEMOP_MNEMONIC2(MR, MOVQ, movq, Qq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_IGNORES_REXW);
7573 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7574 if (IEM_IS_MODRM_REG_MODE(bRm))
7575 {
7576 /*
7577 * MMX, MMX.
7578 */
7579 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
7580 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
7581 IEM_MC_BEGIN(0, 1, 0, 0);
7582 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7583 IEM_MC_LOCAL(uint64_t, u64Tmp);
7584 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7585 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7586 IEM_MC_FPU_TO_MMX_MODE();
7587
7588 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7589 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_RM_8(bRm), u64Tmp);
7590
7591 IEM_MC_ADVANCE_RIP_AND_FINISH();
7592 IEM_MC_END();
7593 }
7594 else
7595 {
7596 /*
7597 * [mem64], MMX.
7598 */
7599 IEM_MC_BEGIN(0, 2, 0, 0);
7600 IEM_MC_LOCAL(uint64_t, u64Tmp);
7601 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7602
7603 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7604 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7605 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7606 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7607 IEM_MC_FPU_TO_MMX_MODE();
7608
7609 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7610 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7611
7612 IEM_MC_ADVANCE_RIP_AND_FINISH();
7613 IEM_MC_END();
7614 }
7615}
7616
7617/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
7618FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
7619{
7620 IEMOP_MNEMONIC2(MR, MOVDQA, movdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7621 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7622 if (IEM_IS_MODRM_REG_MODE(bRm))
7623 {
7624 /*
7625 * XMM, XMM.
7626 */
7627 IEM_MC_BEGIN(0, 0, 0, 0);
7628 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7629 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7630 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7631 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
7632 IEM_GET_MODRM_REG(pVCpu, bRm));
7633 IEM_MC_ADVANCE_RIP_AND_FINISH();
7634 IEM_MC_END();
7635 }
7636 else
7637 {
7638 /*
7639 * [mem128], XMM.
7640 */
7641 IEM_MC_BEGIN(0, 2, 0, 0);
7642 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
7643 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7644
7645 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7646 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7647 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7648 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7649
7650 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7651 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
7652
7653 IEM_MC_ADVANCE_RIP_AND_FINISH();
7654 IEM_MC_END();
7655 }
7656}
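
/* Note: The store above uses IEM_MC_STORE_MEM_U128_ALIGN_SSE, so a misaligned
   16-byte operand faults as architecturally required for movdqa; movdqu below
   differs only in using the unaligned IEM_MC_STORE_MEM_U128. */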
7657
7658/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
7659FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
7660{
7661 IEMOP_MNEMONIC2(MR, MOVDQU, movdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7662 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7663 if (IEM_IS_MODRM_REG_MODE(bRm))
7664 {
7665 /*
7666 * XMM, XMM.
7667 */
7668 IEM_MC_BEGIN(0, 0, 0, 0);
7669 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7670 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7671 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7672 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
7673 IEM_GET_MODRM_REG(pVCpu, bRm));
7674 IEM_MC_ADVANCE_RIP_AND_FINISH();
7675 IEM_MC_END();
7676 }
7677 else
7678 {
7679 /*
7680 * [mem128], XMM.
7681 */
7682 IEM_MC_BEGIN(0, 2, 0, 0);
7683 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
7684 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7685
7686 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7687 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7688 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7689 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7690
7691 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7692 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
7693
7694 IEM_MC_ADVANCE_RIP_AND_FINISH();
7695 IEM_MC_END();
7696 }
7697}
7698
7699/* Opcode 0xf2 0x0f 0x7f - invalid */
7700
7701
7702
7703/** Opcode 0x0f 0x80. */
7704FNIEMOP_DEF(iemOp_jo_Jv)
7705{
7706 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
7707 IEMOP_HLP_MIN_386();
7708 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7709 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7710 {
7711 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7712 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7714 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7715 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7716 } IEM_MC_ELSE() {
7717 IEM_MC_ADVANCE_RIP_AND_FINISH();
7718 } IEM_MC_ENDIF();
7719 IEM_MC_END();
7720 }
7721 else
7722 {
7723 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7724 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7725 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7726 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7727 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7728 } IEM_MC_ELSE() {
7729 IEM_MC_ADVANCE_RIP_AND_FINISH();
7730 } IEM_MC_ENDIF();
7731 IEM_MC_END();
7732 }
7733}
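
/* Note: Opcodes 0x0f 0x80 thru 0x8f all follow the pattern above: Jv is a
   signed 16-bit or 32-bit displacement picked by the effective operand size
   (in 64-bit mode the operand size defaults to 64 bits and the 32-bit
   displacement is sign-extended), and only the EFLAGS test differs between
   the sixteen Jcc forms. */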
7734
7735
7736/** Opcode 0x0f 0x81. */
7737FNIEMOP_DEF(iemOp_jno_Jv)
7738{
7739 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
7740 IEMOP_HLP_MIN_386();
7741 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7742 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7743 {
7744 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7745 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7746 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7747 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7748 IEM_MC_ADVANCE_RIP_AND_FINISH();
7749 } IEM_MC_ELSE() {
7750 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7751 } IEM_MC_ENDIF();
7752 IEM_MC_END();
7753 }
7754 else
7755 {
7756 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7757 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7758 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7759 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7760 IEM_MC_ADVANCE_RIP_AND_FINISH();
7761 } IEM_MC_ELSE() {
7762 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7763 } IEM_MC_ENDIF();
7764 IEM_MC_END();
7765 }
7766}
7767
7768
7769/** Opcode 0x0f 0x82. */
7770FNIEMOP_DEF(iemOp_jc_Jv)
7771{
7772 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
7773 IEMOP_HLP_MIN_386();
7774 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7775 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7776 {
7777 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7778 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7779 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7780 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7781 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7782 } IEM_MC_ELSE() {
7783 IEM_MC_ADVANCE_RIP_AND_FINISH();
7784 } IEM_MC_ENDIF();
7785 IEM_MC_END();
7786 }
7787 else
7788 {
7789 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7790 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7791 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7792 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7793 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7794 } IEM_MC_ELSE() {
7795 IEM_MC_ADVANCE_RIP_AND_FINISH();
7796 } IEM_MC_ENDIF();
7797 IEM_MC_END();
7798 }
7799}
7800
7801
7802/** Opcode 0x0f 0x83. */
7803FNIEMOP_DEF(iemOp_jnc_Jv)
7804{
7805 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
7806 IEMOP_HLP_MIN_386();
7807 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7808 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7809 {
7810 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7811 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7812 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7813 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7814 IEM_MC_ADVANCE_RIP_AND_FINISH();
7815 } IEM_MC_ELSE() {
7816 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7817 } IEM_MC_ENDIF();
7818 IEM_MC_END();
7819 }
7820 else
7821 {
7822 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7823 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7824 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7825 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7826 IEM_MC_ADVANCE_RIP_AND_FINISH();
7827 } IEM_MC_ELSE() {
7828 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7829 } IEM_MC_ENDIF();
7830 IEM_MC_END();
7831 }
7832}
7833
7834
7835/** Opcode 0x0f 0x84. */
7836FNIEMOP_DEF(iemOp_je_Jv)
7837{
7838 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
7839 IEMOP_HLP_MIN_386();
7840 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7841 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7842 {
7843 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7844 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7845 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7846 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7847 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7848 } IEM_MC_ELSE() {
7849 IEM_MC_ADVANCE_RIP_AND_FINISH();
7850 } IEM_MC_ENDIF();
7851 IEM_MC_END();
7852 }
7853 else
7854 {
7855 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7856 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7857 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7858 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7859 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7860 } IEM_MC_ELSE() {
7861 IEM_MC_ADVANCE_RIP_AND_FINISH();
7862 } IEM_MC_ENDIF();
7863 IEM_MC_END();
7864 }
7865}
7866
7867
7868/** Opcode 0x0f 0x85. */
7869FNIEMOP_DEF(iemOp_jne_Jv)
7870{
7871 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
7872 IEMOP_HLP_MIN_386();
7873 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7874 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7875 {
7876 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7877 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7878 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7879 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7880 IEM_MC_ADVANCE_RIP_AND_FINISH();
7881 } IEM_MC_ELSE() {
7882 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7883 } IEM_MC_ENDIF();
7884 IEM_MC_END();
7885 }
7886 else
7887 {
7888 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7889 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7890 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7891 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7892 IEM_MC_ADVANCE_RIP_AND_FINISH();
7893 } IEM_MC_ELSE() {
7894 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7895 } IEM_MC_ENDIF();
7896 IEM_MC_END();
7897 }
7898}
7899
7900
7901/** Opcode 0x0f 0x86. */
7902FNIEMOP_DEF(iemOp_jbe_Jv)
7903{
7904 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
7905 IEMOP_HLP_MIN_386();
7906 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7907 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7908 {
7909 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7910 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7911 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7912 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7913 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7914 } IEM_MC_ELSE() {
7915 IEM_MC_ADVANCE_RIP_AND_FINISH();
7916 } IEM_MC_ENDIF();
7917 IEM_MC_END();
7918 }
7919 else
7920 {
7921 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7922 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7923 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7924 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7925 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7926 } IEM_MC_ELSE() {
7927 IEM_MC_ADVANCE_RIP_AND_FINISH();
7928 } IEM_MC_ENDIF();
7929 IEM_MC_END();
7930 }
7931}
7932
7933
7934/** Opcode 0x0f 0x87. */
7935FNIEMOP_DEF(iemOp_jnbe_Jv)
7936{
7937 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
7938 IEMOP_HLP_MIN_386();
7939 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7940 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7941 {
7942 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7943 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7944 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7945 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7946 IEM_MC_ADVANCE_RIP_AND_FINISH();
7947 } IEM_MC_ELSE() {
7948 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7949 } IEM_MC_ENDIF();
7950 IEM_MC_END();
7951 }
7952 else
7953 {
7954 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7955 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7956 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7957 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7958 IEM_MC_ADVANCE_RIP_AND_FINISH();
7959 } IEM_MC_ELSE() {
7960 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7961 } IEM_MC_ENDIF();
7962 IEM_MC_END();
7963 }
7964}
7965
7966
7967/** Opcode 0x0f 0x88. */
7968FNIEMOP_DEF(iemOp_js_Jv)
7969{
7970 IEMOP_MNEMONIC(js_Jv, "js Jv");
7971 IEMOP_HLP_MIN_386();
7972 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7973 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7974 {
7975 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7976 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7977 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7978 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7979 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7980 } IEM_MC_ELSE() {
7981 IEM_MC_ADVANCE_RIP_AND_FINISH();
7982 } IEM_MC_ENDIF();
7983 IEM_MC_END();
7984 }
7985 else
7986 {
7987 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7988 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7989 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7990 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
7991 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7992 } IEM_MC_ELSE() {
7993 IEM_MC_ADVANCE_RIP_AND_FINISH();
7994 } IEM_MC_ENDIF();
7995 IEM_MC_END();
7996 }
7997}
7998
7999
8000/** Opcode 0x0f 0x89. */
8001FNIEMOP_DEF(iemOp_jns_Jv)
8002{
8003 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
8004 IEMOP_HLP_MIN_386();
8005 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8006 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8007 {
8008 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8009 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8011 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8012 IEM_MC_ADVANCE_RIP_AND_FINISH();
8013 } IEM_MC_ELSE() {
8014 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8015 } IEM_MC_ENDIF();
8016 IEM_MC_END();
8017 }
8018 else
8019 {
8020 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8021 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8022 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8023 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8024 IEM_MC_ADVANCE_RIP_AND_FINISH();
8025 } IEM_MC_ELSE() {
8026 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8027 } IEM_MC_ENDIF();
8028 IEM_MC_END();
8029 }
8030}
8031
8032
8033/** Opcode 0x0f 0x8a. */
8034FNIEMOP_DEF(iemOp_jp_Jv)
8035{
8036 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
8037 IEMOP_HLP_MIN_386();
8038 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8039 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8040 {
8041 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8042 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8043 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8044 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8045 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8046 } IEM_MC_ELSE() {
8047 IEM_MC_ADVANCE_RIP_AND_FINISH();
8048 } IEM_MC_ENDIF();
8049 IEM_MC_END();
8050 }
8051 else
8052 {
8053 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8054 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8055 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8056 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8057 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8058 } IEM_MC_ELSE() {
8059 IEM_MC_ADVANCE_RIP_AND_FINISH();
8060 } IEM_MC_ENDIF();
8061 IEM_MC_END();
8062 }
8063}
8064
8065
8066/** Opcode 0x0f 0x8b. */
8067FNIEMOP_DEF(iemOp_jnp_Jv)
8068{
8069 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
8070 IEMOP_HLP_MIN_386();
8071 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8072 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8073 {
8074 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8075 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8076 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8077 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8078 IEM_MC_ADVANCE_RIP_AND_FINISH();
8079 } IEM_MC_ELSE() {
8080 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8081 } IEM_MC_ENDIF();
8082 IEM_MC_END();
8083 }
8084 else
8085 {
8086 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8087 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8088 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8089 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8090 IEM_MC_ADVANCE_RIP_AND_FINISH();
8091 } IEM_MC_ELSE() {
8092 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8093 } IEM_MC_ENDIF();
8094 IEM_MC_END();
8095 }
8096}
8097
8098
8099/** Opcode 0x0f 0x8c. */
8100FNIEMOP_DEF(iemOp_jl_Jv)
8101{
8102 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
8103 IEMOP_HLP_MIN_386();
8104 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8105 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8106 {
8107 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8108 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8109 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8110 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8111 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8112 } IEM_MC_ELSE() {
8113 IEM_MC_ADVANCE_RIP_AND_FINISH();
8114 } IEM_MC_ENDIF();
8115 IEM_MC_END();
8116 }
8117 else
8118 {
8119 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8120 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8121 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8122 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8123 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8124 } IEM_MC_ELSE() {
8125 IEM_MC_ADVANCE_RIP_AND_FINISH();
8126 } IEM_MC_ENDIF();
8127 IEM_MC_END();
8128 }
8129}
8130
8131
8132/** Opcode 0x0f 0x8d. */
8133FNIEMOP_DEF(iemOp_jnl_Jv)
8134{
8135 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
8136 IEMOP_HLP_MIN_386();
8137 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8138 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8139 {
8140 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8141 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8142 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8143 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8144 IEM_MC_ADVANCE_RIP_AND_FINISH();
8145 } IEM_MC_ELSE() {
8146 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8147 } IEM_MC_ENDIF();
8148 IEM_MC_END();
8149 }
8150 else
8151 {
8152 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8153 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8154 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8155 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8156 IEM_MC_ADVANCE_RIP_AND_FINISH();
8157 } IEM_MC_ELSE() {
8158 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8159 } IEM_MC_ENDIF();
8160 IEM_MC_END();
8161 }
8162}
8163
8164
8165/** Opcode 0x0f 0x8e. */
8166FNIEMOP_DEF(iemOp_jle_Jv)
8167{
8168 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
8169 IEMOP_HLP_MIN_386();
8170 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8171 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8172 {
8173 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8174 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8175 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8176 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8177 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8178 } IEM_MC_ELSE() {
8179 IEM_MC_ADVANCE_RIP_AND_FINISH();
8180 } IEM_MC_ENDIF();
8181 IEM_MC_END();
8182 }
8183 else
8184 {
8185 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8186 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8187 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8188 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8189 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8190 } IEM_MC_ELSE() {
8191 IEM_MC_ADVANCE_RIP_AND_FINISH();
8192 } IEM_MC_ENDIF();
8193 IEM_MC_END();
8194 }
8195}
8196
8197
8198/** Opcode 0x0f 0x8f. */
8199FNIEMOP_DEF(iemOp_jnle_Jv)
8200{
8201 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
8202 IEMOP_HLP_MIN_386();
8203 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8204 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8205 {
8206 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8207 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8208 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8209 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8210 IEM_MC_ADVANCE_RIP_AND_FINISH();
8211 } IEM_MC_ELSE() {
8212 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8213 } IEM_MC_ENDIF();
8214 IEM_MC_END();
8215 }
8216 else
8217 {
8218 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8219 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8220 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8221 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8222 IEM_MC_ADVANCE_RIP_AND_FINISH();
8223 } IEM_MC_ELSE() {
8224 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8225 } IEM_MC_ENDIF();
8226 IEM_MC_END();
8227 }
8228}
8229
8230
8231/** Opcode 0x0f 0x90. */
8232FNIEMOP_DEF(iemOp_seto_Eb)
8233{
8234 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
8235 IEMOP_HLP_MIN_386();
8236 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8237
8238 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8239 * any way. AMD says it's "unused", whatever that means. We're
8240 * ignoring it for now. */
8241 if (IEM_IS_MODRM_REG_MODE(bRm))
8242 {
8243 /* register target */
8244 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8245 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8246 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8247 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8248 } IEM_MC_ELSE() {
8249 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8250 } IEM_MC_ENDIF();
8251 IEM_MC_ADVANCE_RIP_AND_FINISH();
8252 IEM_MC_END();
8253 }
8254 else
8255 {
8256 /* memory target */
8257 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8258 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8259 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8260 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8261 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8262 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8263 } IEM_MC_ELSE() {
8264 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8265 } IEM_MC_ENDIF();
8266 IEM_MC_ADVANCE_RIP_AND_FINISH();
8267 IEM_MC_END();
8268 }
8269}
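
/* Note: The remaining 0x0f 0x90 thru 0x9f setcc forms below repeat this
   register/memory pattern verbatim, storing a single 1 or 0 byte and varying
   only in the EFLAGS condition tested. */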
8270
8271
8272/** Opcode 0x0f 0x91. */
8273FNIEMOP_DEF(iemOp_setno_Eb)
8274{
8275 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
8276 IEMOP_HLP_MIN_386();
8277 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8278
8279 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8280 * any way. AMD says it's "unused", whatever that means. We're
8281 * ignoring it for now. */
8282 if (IEM_IS_MODRM_REG_MODE(bRm))
8283 {
8284 /* register target */
8285 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8286 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8287 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8288 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8289 } IEM_MC_ELSE() {
8290 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8291 } IEM_MC_ENDIF();
8292 IEM_MC_ADVANCE_RIP_AND_FINISH();
8293 IEM_MC_END();
8294 }
8295 else
8296 {
8297 /* memory target */
8298 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8299 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8300 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8301 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8302 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8303 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8304 } IEM_MC_ELSE() {
8305 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8306 } IEM_MC_ENDIF();
8307 IEM_MC_ADVANCE_RIP_AND_FINISH();
8308 IEM_MC_END();
8309 }
8310}
8311
8312
8313/** Opcode 0x0f 0x92. */
8314FNIEMOP_DEF(iemOp_setc_Eb)
8315{
8316 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
8317 IEMOP_HLP_MIN_386();
8318 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8319
8320 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8321 * any way. AMD says it's "unused", whatever that means. We're
8322 * ignoring it for now. */
8323 if (IEM_IS_MODRM_REG_MODE(bRm))
8324 {
8325 /* register target */
8326 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8327 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8328 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8329 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8330 } IEM_MC_ELSE() {
8331 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8332 } IEM_MC_ENDIF();
8333 IEM_MC_ADVANCE_RIP_AND_FINISH();
8334 IEM_MC_END();
8335 }
8336 else
8337 {
8338 /* memory target */
8339 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8340 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8341 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8342 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8343 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8344 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8345 } IEM_MC_ELSE() {
8346 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8347 } IEM_MC_ENDIF();
8348 IEM_MC_ADVANCE_RIP_AND_FINISH();
8349 IEM_MC_END();
8350 }
8351}
8352
8353
8354/** Opcode 0x0f 0x93. */
8355FNIEMOP_DEF(iemOp_setnc_Eb)
8356{
8357 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
8358 IEMOP_HLP_MIN_386();
8359 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8360
8361 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8362 * any way. AMD says it's "unused", whatever that means. We're
8363 * ignoring it for now. */
8364 if (IEM_IS_MODRM_REG_MODE(bRm))
8365 {
8366 /* register target */
8367 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8368 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8369 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8370 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8371 } IEM_MC_ELSE() {
8372 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8373 } IEM_MC_ENDIF();
8374 IEM_MC_ADVANCE_RIP_AND_FINISH();
8375 IEM_MC_END();
8376 }
8377 else
8378 {
8379 /* memory target */
8380 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8381 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8382 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8383 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8384 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8385 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8386 } IEM_MC_ELSE() {
8387 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8388 } IEM_MC_ENDIF();
8389 IEM_MC_ADVANCE_RIP_AND_FINISH();
8390 IEM_MC_END();
8391 }
8392}
8393
8394
8395/** Opcode 0x0f 0x94. */
8396FNIEMOP_DEF(iemOp_sete_Eb)
8397{
8398 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
8399 IEMOP_HLP_MIN_386();
8400 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8401
8402 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8403 * any way. AMD says it's "unused", whatever that means. We're
8404 * ignoring it for now. */
8405 if (IEM_IS_MODRM_REG_MODE(bRm))
8406 {
8407 /* register target */
8408 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8409 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8410 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8411 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8412 } IEM_MC_ELSE() {
8413 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8414 } IEM_MC_ENDIF();
8415 IEM_MC_ADVANCE_RIP_AND_FINISH();
8416 IEM_MC_END();
8417 }
8418 else
8419 {
8420 /* memory target */
8421 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8422 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8423 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8424 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8425 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8426 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8427 } IEM_MC_ELSE() {
8428 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8429 } IEM_MC_ENDIF();
8430 IEM_MC_ADVANCE_RIP_AND_FINISH();
8431 IEM_MC_END();
8432 }
8433}
8434
8435
8436/** Opcode 0x0f 0x95. */
8437FNIEMOP_DEF(iemOp_setne_Eb)
8438{
8439 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
8440 IEMOP_HLP_MIN_386();
8441 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8442
8443 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8444 * any way. AMD says it's "unused", whatever that means. We're
8445 * ignoring it for now. */
8446 if (IEM_IS_MODRM_REG_MODE(bRm))
8447 {
8448 /* register target */
8449 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8450 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8451 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8452 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8453 } IEM_MC_ELSE() {
8454 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8455 } IEM_MC_ENDIF();
8456 IEM_MC_ADVANCE_RIP_AND_FINISH();
8457 IEM_MC_END();
8458 }
8459 else
8460 {
8461 /* memory target */
8462 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8463 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8464 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8465 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8466 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8467 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8468 } IEM_MC_ELSE() {
8469 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8470 } IEM_MC_ENDIF();
8471 IEM_MC_ADVANCE_RIP_AND_FINISH();
8472 IEM_MC_END();
8473 }
8474}
8475
8476
8477/** Opcode 0x0f 0x96. */
8478FNIEMOP_DEF(iemOp_setbe_Eb)
8479{
8480 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
8481 IEMOP_HLP_MIN_386();
8482 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8483
8484 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8485 * any way. AMD says it's "unused", whatever that means. We're
8486 * ignoring it for now. */
8487 if (IEM_IS_MODRM_REG_MODE(bRm))
8488 {
8489 /* register target */
8490 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8491 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8492 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8493 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8494 } IEM_MC_ELSE() {
8495 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8496 } IEM_MC_ENDIF();
8497 IEM_MC_ADVANCE_RIP_AND_FINISH();
8498 IEM_MC_END();
8499 }
8500 else
8501 {
8502 /* memory target */
8503 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8504 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8505 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8506 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8507 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8508 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8509 } IEM_MC_ELSE() {
8510 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8511 } IEM_MC_ENDIF();
8512 IEM_MC_ADVANCE_RIP_AND_FINISH();
8513 IEM_MC_END();
8514 }
8515}
8516
8517
8518/** Opcode 0x0f 0x97. */
8519FNIEMOP_DEF(iemOp_setnbe_Eb)
8520{
8521 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
8522 IEMOP_HLP_MIN_386();
8523 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8524
8525 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8526 * any way. AMD says it's "unused", whatever that means. We're
8527 * ignoring it for now. */
8528 if (IEM_IS_MODRM_REG_MODE(bRm))
8529 {
8530 /* register target */
8531 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8532 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8533 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8534 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8535 } IEM_MC_ELSE() {
8536 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8537 } IEM_MC_ENDIF();
8538 IEM_MC_ADVANCE_RIP_AND_FINISH();
8539 IEM_MC_END();
8540 }
8541 else
8542 {
8543 /* memory target */
8544 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8545 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8546 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8547 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8548 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8549 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8550 } IEM_MC_ELSE() {
8551 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8552 } IEM_MC_ENDIF();
8553 IEM_MC_ADVANCE_RIP_AND_FINISH();
8554 IEM_MC_END();
8555 }
8556}
8557
8558
8559/** Opcode 0x0f 0x98. */
8560FNIEMOP_DEF(iemOp_sets_Eb)
8561{
8562 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
8563 IEMOP_HLP_MIN_386();
8564 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8565
8566 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8567 * any way. AMD says it's "unused", whatever that means. We're
8568 * ignoring it for now. */
8569 if (IEM_IS_MODRM_REG_MODE(bRm))
8570 {
8571 /* register target */
8572 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8573 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8574 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8575 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8576 } IEM_MC_ELSE() {
8577 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8578 } IEM_MC_ENDIF();
8579 IEM_MC_ADVANCE_RIP_AND_FINISH();
8580 IEM_MC_END();
8581 }
8582 else
8583 {
8584 /* memory target */
8585 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8586 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8587 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8588 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8589 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8590 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8591 } IEM_MC_ELSE() {
8592 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8593 } IEM_MC_ENDIF();
8594 IEM_MC_ADVANCE_RIP_AND_FINISH();
8595 IEM_MC_END();
8596 }
8597}
8598
8599
8600/** Opcode 0x0f 0x99. */
8601FNIEMOP_DEF(iemOp_setns_Eb)
8602{
8603 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
8604 IEMOP_HLP_MIN_386();
8605 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8606
8607 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8608 * any way. AMD says it's "unused", whatever that means. We're
8609 * ignoring it for now. */
8610 if (IEM_IS_MODRM_REG_MODE(bRm))
8611 {
8612 /* register target */
8613 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8614 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8615 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8616 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8617 } IEM_MC_ELSE() {
8618 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8619 } IEM_MC_ENDIF();
8620 IEM_MC_ADVANCE_RIP_AND_FINISH();
8621 IEM_MC_END();
8622 }
8623 else
8624 {
8625 /* memory target */
8626 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8627 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8628 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8629 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8630 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8631 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8632 } IEM_MC_ELSE() {
8633 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8634 } IEM_MC_ENDIF();
8635 IEM_MC_ADVANCE_RIP_AND_FINISH();
8636 IEM_MC_END();
8637 }
8638}
8639
8640
8641/** Opcode 0x0f 0x9a. */
8642FNIEMOP_DEF(iemOp_setp_Eb)
8643{
8644 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
8645 IEMOP_HLP_MIN_386();
8646 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8647
8648 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8649 * any way. AMD says it's "unused", whatever that means. We're
8650 * ignoring it for now. */
8651 if (IEM_IS_MODRM_REG_MODE(bRm))
8652 {
8653 /* register target */
8654 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8656 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8657 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8658 } IEM_MC_ELSE() {
8659 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8660 } IEM_MC_ENDIF();
8661 IEM_MC_ADVANCE_RIP_AND_FINISH();
8662 IEM_MC_END();
8663 }
8664 else
8665 {
8666 /* memory target */
8667 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8668 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8669 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8670 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8671 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8672 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8673 } IEM_MC_ELSE() {
8674 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8675 } IEM_MC_ENDIF();
8676 IEM_MC_ADVANCE_RIP_AND_FINISH();
8677 IEM_MC_END();
8678 }
8679}
8680
8681
8682/** Opcode 0x0f 0x9b. */
8683FNIEMOP_DEF(iemOp_setnp_Eb)
8684{
8685 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
8686 IEMOP_HLP_MIN_386();
8687 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8688
8689 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8690 * any way. AMD says it's "unused", whatever that means. We're
8691 * ignoring it for now. */
8692 if (IEM_IS_MODRM_REG_MODE(bRm))
8693 {
8694 /* register target */
8695 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8696 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8697 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8698 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8699 } IEM_MC_ELSE() {
8700 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8701 } IEM_MC_ENDIF();
8702 IEM_MC_ADVANCE_RIP_AND_FINISH();
8703 IEM_MC_END();
8704 }
8705 else
8706 {
8707 /* memory target */
8708 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8709 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8710 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8711 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8712 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8713 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8714 } IEM_MC_ELSE() {
8715 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8716 } IEM_MC_ENDIF();
8717 IEM_MC_ADVANCE_RIP_AND_FINISH();
8718 IEM_MC_END();
8719 }
8720}
8721
8722
8723/** Opcode 0x0f 0x9c. */
8724FNIEMOP_DEF(iemOp_setl_Eb)
8725{
8726 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
8727 IEMOP_HLP_MIN_386();
8728 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8729
8730 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8731 * any way. AMD says it's "unused", whatever that means. We're
8732 * ignoring for now. */
8733 if (IEM_IS_MODRM_REG_MODE(bRm))
8734 {
8735 /* register target */
8736 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8737 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8738 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8739 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8740 } IEM_MC_ELSE() {
8741 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8742 } IEM_MC_ENDIF();
8743 IEM_MC_ADVANCE_RIP_AND_FINISH();
8744 IEM_MC_END();
8745 }
8746 else
8747 {
8748 /* memory target */
8749 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8750 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8751 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8752 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8753 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8754 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8755 } IEM_MC_ELSE() {
8756 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8757 } IEM_MC_ENDIF();
8758 IEM_MC_ADVANCE_RIP_AND_FINISH();
8759 IEM_MC_END();
8760 }
8761}
8762
8763
8764/** Opcode 0x0f 0x9d. */
8765FNIEMOP_DEF(iemOp_setnl_Eb)
8766{
8767 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
8768 IEMOP_HLP_MIN_386();
8769 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8770
8771 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8772 * any way. AMD says it's "unused", whatever that means. We're
8773 * ignoring for now. */
8774 if (IEM_IS_MODRM_REG_MODE(bRm))
8775 {
8776 /* register target */
8777 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8778 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8779 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8780 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8781 } IEM_MC_ELSE() {
8782 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8783 } IEM_MC_ENDIF();
8784 IEM_MC_ADVANCE_RIP_AND_FINISH();
8785 IEM_MC_END();
8786 }
8787 else
8788 {
8789 /* memory target */
8790 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8791 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8792 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8793 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8794 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8795 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8796 } IEM_MC_ELSE() {
8797 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8798 } IEM_MC_ENDIF();
8799 IEM_MC_ADVANCE_RIP_AND_FINISH();
8800 IEM_MC_END();
8801 }
8802}
8803
8804
8805/** Opcode 0x0f 0x9e. */
8806FNIEMOP_DEF(iemOp_setle_Eb)
8807{
8808 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
8809 IEMOP_HLP_MIN_386();
8810 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8811
8812 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8813 * any way. AMD says it's "unused", whatever that means. We're
8814 * ignoring for now. */
8815 if (IEM_IS_MODRM_REG_MODE(bRm))
8816 {
8817 /* register target */
8818 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8819 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8820 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8821 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8822 } IEM_MC_ELSE() {
8823 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8824 } IEM_MC_ENDIF();
8825 IEM_MC_ADVANCE_RIP_AND_FINISH();
8826 IEM_MC_END();
8827 }
8828 else
8829 {
8830 /* memory target */
8831 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8832 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8833 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8834 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8835 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8836 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8837 } IEM_MC_ELSE() {
8838 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8839 } IEM_MC_ENDIF();
8840 IEM_MC_ADVANCE_RIP_AND_FINISH();
8841 IEM_MC_END();
8842 }
8843}
8844
8845
8846/** Opcode 0x0f 0x9f. */
8847FNIEMOP_DEF(iemOp_setnle_Eb)
8848{
8849 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
8850 IEMOP_HLP_MIN_386();
8851 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8852
8853 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8854 * any way. AMD says it's "unused", whatever that means. We're
8855 * ignoring for now. */
8856 if (IEM_IS_MODRM_REG_MODE(bRm))
8857 {
8858 /* register target */
8859 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8860 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8861 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8862 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8863 } IEM_MC_ELSE() {
8864 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8865 } IEM_MC_ENDIF();
8866 IEM_MC_ADVANCE_RIP_AND_FINISH();
8867 IEM_MC_END();
8868 }
8869 else
8870 {
8871 /* memory target */
8872 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8873 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8874 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8875 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8876 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8877 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8878 } IEM_MC_ELSE() {
8879 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8880 } IEM_MC_ENDIF();
8881 IEM_MC_ADVANCE_RIP_AND_FINISH();
8882 IEM_MC_END();
8883 }
8884}
8885
8886
8887/** Opcode 0x0f 0xa0. */
8888FNIEMOP_DEF(iemOp_push_fs)
8889{
8890 IEMOP_MNEMONIC(push_fs, "push fs");
8891 IEMOP_HLP_MIN_386();
8892 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8893 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
8894}
8895
8896
8897/** Opcode 0x0f 0xa1. */
8898FNIEMOP_DEF(iemOp_pop_fs)
8899{
8900 IEMOP_MNEMONIC(pop_fs, "pop fs");
8901 IEMOP_HLP_MIN_386();
8902 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8903 IEM_MC_DEFER_TO_CIMPL_2_RET(0, iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
8904}
8905
8906
8907/** Opcode 0x0f 0xa2. */
8908FNIEMOP_DEF(iemOp_cpuid)
8909{
8910 IEMOP_MNEMONIC(cpuid, "cpuid");
8911 IEMOP_HLP_MIN_486(); /* not all 486es. */
8912 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8913 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_cpuid);
8914}
8915
8916
8917/**
8918 * Body for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
8919 * iemOp_bts_Ev_Gv.
8920 */
8921
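/*
 * Note on the memory forms below: the bit offset taken from Gv is signed and
 * may address bits outside the operand at the decoded effective address, so
 * the bodies split it into an address adjustment and an in-operand bit
 * number.  For 16-bit operands the math is roughly:
 *
 *      i16AddrAdj = ((int16_t)u16Src >> 4) << 1;  - signed byte offset
 *      u16Src    &= 0x0f;                         - bit within that word
 *
 * E.g. 'bt word [mem], ax' with AX=0x23 tests bit 3 of the word at mem+4,
 * while AX=0xffff (-1) tests bit 15 of the word at mem-2.
 */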
8922#define IEMOP_BODY_BIT_Ev_Gv_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
8923 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
8924 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
8925 \
8926 if (IEM_IS_MODRM_REG_MODE(bRm)) \
8927 { \
8928 /* register destination. */ \
8929 switch (pVCpu->iem.s.enmEffOpSize) \
8930 { \
8931 case IEMMODE_16BIT: \
8932 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
8933 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8934 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
8935 IEM_MC_ARG(uint16_t, u16Src, 1); \
8936 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
8937 \
8938 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8939 IEM_MC_AND_LOCAL_U16(u16Src, 0xf); \
8940 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8941 IEM_MC_REF_EFLAGS(pEFlags); \
8942 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
8943 \
8944 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8945 IEM_MC_END(); \
8946 break; \
8947 \
8948 case IEMMODE_32BIT: \
8949 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
8950 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8951 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
8952 IEM_MC_ARG(uint32_t, u32Src, 1); \
8953 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
8954 \
8955 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8956 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f); \
8957 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8958 IEM_MC_REF_EFLAGS(pEFlags); \
8959 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
8960 \
8961 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
8962 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8963 IEM_MC_END(); \
8964 break; \
8965 \
8966 case IEMMODE_64BIT: \
8967 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
8968 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8969 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
8970 IEM_MC_ARG(uint64_t, u64Src, 1); \
8971 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
8972 \
8973 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8974 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f); \
8975 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8976 IEM_MC_REF_EFLAGS(pEFlags); \
8977 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
8978 \
8979 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8980 IEM_MC_END(); \
8981 break; \
8982 \
8983 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
8984 } \
8985 } \
8986 else \
8987 { \
8988 /* memory destination. */ \
8989 /** @todo test negative bit offsets! */ \
8990 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
8991 { \
8992 switch (pVCpu->iem.s.enmEffOpSize) \
8993 { \
8994 case IEMMODE_16BIT: \
8995 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_386, 0); \
8996 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
8997 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
8998 IEMOP_HLP_DONE_DECODING(); \
8999 \
9000 IEM_MC_ARG(uint16_t, u16Src, 1); \
9001 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9002 IEM_MC_LOCAL_ASSIGN(int16_t, i16AddrAdj, /*=*/ u16Src); \
9003 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
9004 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
9005 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
9006 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
9007 \
9008 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9009 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9010 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9011 \
9012 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9013 IEM_MC_FETCH_EFLAGS(EFlags); \
9014 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
9015 \
9016 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
9017 IEM_MC_COMMIT_EFLAGS(EFlags); \
9018 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9019 IEM_MC_END(); \
9020 break; \
9021 \
9022 case IEMMODE_32BIT: \
9023 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_386, 0); \
9024 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9025 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9026 IEMOP_HLP_DONE_DECODING(); \
9027 \
9028 IEM_MC_ARG(uint32_t, u32Src, 1); \
9029 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9030 IEM_MC_LOCAL_ASSIGN(int32_t, i32AddrAdj, /*=*/ u32Src); \
9031 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
9032 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
9033 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
9034 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
9035 \
9036 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9037 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9038 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9039 \
9040 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9041 IEM_MC_FETCH_EFLAGS(EFlags); \
9042 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
9043 \
9044 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
9045 IEM_MC_COMMIT_EFLAGS(EFlags); \
9046 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9047 IEM_MC_END(); \
9048 break; \
9049 \
9050 case IEMMODE_64BIT: \
9051 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0); \
9052 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9053 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9054 IEMOP_HLP_DONE_DECODING(); \
9055 \
9056 IEM_MC_ARG(uint64_t, u64Src, 1); \
9057 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9058 IEM_MC_LOCAL_ASSIGN(int64_t, i64AddrAdj, /*=*/ u64Src); \
9059 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9060 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9061 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9062 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9063 \
9064 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9065 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9066 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9067 \
9068 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9069 IEM_MC_FETCH_EFLAGS(EFlags); \
9070 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
9071 \
9072 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
9073 IEM_MC_COMMIT_EFLAGS(EFlags); \
9074 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9075 IEM_MC_END(); \
9076 break; \
9077 \
9078 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9079 } \
9080 } \
9081 else \
9082 { \
9083 (void)0
9084/* Separate macro to work around parsing issue in IEMAllInstPython.py */
9085#define IEMOP_BODY_BIT_Ev_Gv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
9086 switch (pVCpu->iem.s.enmEffOpSize) \
9087 { \
9088 case IEMMODE_16BIT: \
9089 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_386, 0); \
9090 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9091 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9092 IEMOP_HLP_DONE_DECODING(); \
9093 \
9094 IEM_MC_ARG(uint16_t, u16Src, 1); \
9095 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9096 IEM_MC_LOCAL_ASSIGN(int16_t, i16AddrAdj, /*=*/ u16Src); \
9097 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
9098 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
9099 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
9100 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
9101 \
9102 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9103 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9104 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9105 \
9106 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9107 IEM_MC_FETCH_EFLAGS(EFlags); \
9108 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
9109 \
9110 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
9111 IEM_MC_COMMIT_EFLAGS(EFlags); \
9112 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9113 IEM_MC_END(); \
9114 break; \
9115 \
9116 case IEMMODE_32BIT: \
9117 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_386, 0); \
9118 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9119 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9120 IEMOP_HLP_DONE_DECODING(); \
9121 \
9122 IEM_MC_ARG(uint32_t, u32Src, 1); \
9123 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9124 IEM_MC_LOCAL_ASSIGN(int32_t, i32AddrAdj, /*=*/ u32Src); \
9125 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
9126 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
9127 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
9128 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
9129 \
9130 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9131 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9132 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9133 \
9134 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9135 IEM_MC_FETCH_EFLAGS(EFlags); \
9136 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
9137 \
9138 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
9139 IEM_MC_COMMIT_EFLAGS(EFlags); \
9140 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9141 IEM_MC_END(); \
9142 break; \
9143 \
9144 case IEMMODE_64BIT: \
9145 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0); \
9146 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9147 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9148 IEMOP_HLP_DONE_DECODING(); \
9149 \
9150 IEM_MC_ARG(uint64_t, u64Src, 1); \
9151 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9152 IEM_MC_LOCAL_ASSIGN(int64_t, i64AddrAdj, /*=*/ u64Src); \
9153 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9154 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9155 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9156 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9157 \
9158 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9159 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9160 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9161 \
9162 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9163 IEM_MC_FETCH_EFLAGS(EFlags); \
9164 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
9165 \
9166 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
9167 IEM_MC_COMMIT_EFLAGS(EFlags); \
9168 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9169 IEM_MC_END(); \
9170 break; \
9171 \
9172 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9173 } \
9174 } \
9175 } \
9176 (void)0
9177
9178/* Read-only version (bt). */
9179#define IEMOP_BODY_BIT_Ev_Gv_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
9180 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
9181 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
9182 \
9183 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9184 { \
9185 /* register destination. */ \
9186 switch (pVCpu->iem.s.enmEffOpSize) \
9187 { \
9188 case IEMMODE_16BIT: \
9189 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
9190 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9191 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
9192 IEM_MC_ARG(uint16_t, u16Src, 1); \
9193 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9194 \
9195 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9196 IEM_MC_AND_LOCAL_U16(u16Src, 0xf); \
9197 IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9198 IEM_MC_REF_EFLAGS(pEFlags); \
9199 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
9200 \
9201 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9202 IEM_MC_END(); \
9203 break; \
9204 \
9205 case IEMMODE_32BIT: \
9206 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
9207 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9208 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
9209 IEM_MC_ARG(uint32_t, u32Src, 1); \
9210 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9211 \
9212 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9213 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f); \
9214 IEM_MC_REF_GREG_U32_CONST(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9215 IEM_MC_REF_EFLAGS(pEFlags); \
9216 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
9217 \
9218 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9219 IEM_MC_END(); \
9220 break; \
9221 \
9222 case IEMMODE_64BIT: \
9223 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
9224 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9225 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
9226 IEM_MC_ARG(uint64_t, u64Src, 1); \
9227 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9228 \
9229 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9230 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f); \
9231 IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9232 IEM_MC_REF_EFLAGS(pEFlags); \
9233 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
9234 \
9235 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9236 IEM_MC_END(); \
9237 break; \
9238 \
9239 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9240 } \
9241 } \
9242 else \
9243 { \
9244 /* memory destination. */ \
9245 /** @todo test negative bit offsets! */ \
9246 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
9247 { \
9248 switch (pVCpu->iem.s.enmEffOpSize) \
9249 { \
9250 case IEMMODE_16BIT: \
9251 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_386, 0); \
9252 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9253 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9254 IEMOP_HLP_DONE_DECODING(); \
9255 \
9256 IEM_MC_ARG(uint16_t, u16Src, 1); \
9257 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9258 IEM_MC_LOCAL_ASSIGN(int16_t, i16AddrAdj, /*=*/ u16Src); \
9259 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
9260 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
9261 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
9262 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
9263 \
9264 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9265 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
9266 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9267 \
9268 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9269 IEM_MC_FETCH_EFLAGS(EFlags); \
9270 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
9271 \
9272 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
9273 IEM_MC_COMMIT_EFLAGS(EFlags); \
9274 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9275 IEM_MC_END(); \
9276 break; \
9277 \
9278 case IEMMODE_32BIT: \
9279 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_386, 0); \
9280 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9281 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9282 IEMOP_HLP_DONE_DECODING(); \
9283 \
9284 IEM_MC_ARG(uint32_t, u32Src, 1); \
9285 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9286 IEM_MC_LOCAL_ASSIGN(int32_t, i32AddrAdj, /*=*/ u32Src); \
9287 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
9288 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
9289 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
9290 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
9291 \
9292 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
9293 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9294 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9295 \
9296 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9297 IEM_MC_FETCH_EFLAGS(EFlags); \
9298 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
9299 \
9300 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
9301 IEM_MC_COMMIT_EFLAGS(EFlags); \
9302 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9303 IEM_MC_END(); \
9304 break; \
9305 \
9306 case IEMMODE_64BIT: \
9307 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0); \
9308 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9309 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9310 IEMOP_HLP_DONE_DECODING(); \
9311 \
9312 IEM_MC_ARG(uint64_t, u64Src, 1); \
9313 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9314 IEM_MC_LOCAL_ASSIGN(int64_t, i64AddrAdj, /*=*/ u64Src); \
9315 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9316 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9317 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9318 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9319 \
9320 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9321 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
9322 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9323 \
9324 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9325 IEM_MC_FETCH_EFLAGS(EFlags); \
9326 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
9327 \
9328 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
9329 IEM_MC_COMMIT_EFLAGS(EFlags); \
9330 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9331 IEM_MC_END(); \
9332 break; \
9333 \
9334 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9335 } \
9336 } \
9337 else \
9338 { \
9339 IEMOP_HLP_DONE_DECODING(); \
9340 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
9341 } \
9342 } \
9343 (void)0
9344
9345
9346/** Opcode 0x0f 0xa3. */
9347FNIEMOP_DEF(iemOp_bt_Ev_Gv)
9348{
9349 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
9350 IEMOP_HLP_MIN_386();
9351 IEMOP_BODY_BIT_Ev_Gv_RO(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64);
9352}
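/* Note: bt never writes, so the read-only body above maps the memory operand
   read-only, and a LOCK prefix makes it raise an invalid opcode exception
   instead of locking. */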
9353
9354
9355/**
9356 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
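 *
 * Note that for the memory forms the effective address is decoded while the
 * shift count immediate byte is still outstanding, which is why the bodies
 * below pass 1 rather than 0 as the outstanding immediate count to
 * IEM_MC_CALC_RM_EFF_ADDR.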
9357 */
9358FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
9359{
9360 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9361 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
9362
9363 if (IEM_IS_MODRM_REG_MODE(bRm))
9364 {
9365 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9366
9367 switch (pVCpu->iem.s.enmEffOpSize)
9368 {
9369 case IEMMODE_16BIT:
9370 IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_386, 0);
9371 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9372 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9373 IEM_MC_ARG(uint16_t, u16Src, 1);
9374 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
9375 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9376
9377 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9378 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9379 IEM_MC_REF_EFLAGS(pEFlags);
9380 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9381
9382 IEM_MC_ADVANCE_RIP_AND_FINISH();
9383 IEM_MC_END();
9384 break;
9385
9386 case IEMMODE_32BIT:
9387 IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_386, 0);
9388 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9389 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9390 IEM_MC_ARG(uint32_t, u32Src, 1);
9391 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
9392 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9393
9394 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9395 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9396 IEM_MC_REF_EFLAGS(pEFlags);
9397 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9398
9399 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
9400 IEM_MC_ADVANCE_RIP_AND_FINISH();
9401 IEM_MC_END();
9402 break;
9403
9404 case IEMMODE_64BIT:
9405 IEM_MC_BEGIN(4, 0, IEM_MC_F_64BIT, 0);
9406 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9407 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9408 IEM_MC_ARG(uint64_t, u64Src, 1);
9409 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
9410 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9411
9412 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9413 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9414 IEM_MC_REF_EFLAGS(pEFlags);
9415 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9416
9417 IEM_MC_ADVANCE_RIP_AND_FINISH();
9418 IEM_MC_END();
9419 break;
9420
9421 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9422 }
9423 }
9424 else
9425 {
9426 switch (pVCpu->iem.s.enmEffOpSize)
9427 {
9428 case IEMMODE_16BIT:
9429 IEM_MC_BEGIN(4, 3, IEM_MC_F_MIN_386, 0);
9430 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9431 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9432
9433 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9434 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9435
9436 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9437 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9438 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9439
9440 IEM_MC_ARG(uint16_t, u16Src, 1);
9441 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9442 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ cShift, 2);
9443 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9444 IEM_MC_FETCH_EFLAGS(EFlags);
9445 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9446
9447 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
9448 IEM_MC_COMMIT_EFLAGS(EFlags);
9449 IEM_MC_ADVANCE_RIP_AND_FINISH();
9450 IEM_MC_END();
9451 break;
9452
9453 case IEMMODE_32BIT:
9454 IEM_MC_BEGIN(4, 3, IEM_MC_F_MIN_386, 0);
9455 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9456 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9457
9458 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9459 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9460
9461 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9462 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9463 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9464
9465 IEM_MC_ARG(uint32_t, u32Src, 1);
9466 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9467 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ cShift, 2);
9468 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9469 IEM_MC_FETCH_EFLAGS(EFlags);
9470 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9471
9472 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
9473 IEM_MC_COMMIT_EFLAGS(EFlags);
9474 IEM_MC_ADVANCE_RIP_AND_FINISH();
9475 IEM_MC_END();
9476 break;
9477
9478 case IEMMODE_64BIT:
9479 IEM_MC_BEGIN(4, 3, IEM_MC_F_64BIT, 0);
9480 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9481 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9482
9483 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9484 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9485
9486 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9487 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9488 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9489
9490 IEM_MC_ARG(uint64_t, u64Src, 1);
9491 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9492 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ cShift, 2);
9493 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9494 IEM_MC_FETCH_EFLAGS(EFlags);
9495
9496 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9497
9498 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
9499 IEM_MC_COMMIT_EFLAGS(EFlags);
9500 IEM_MC_ADVANCE_RIP_AND_FINISH();
9501 IEM_MC_END();
9502 break;
9503
9504 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9505 }
9506 }
9507}
9508
9509
9510/**
9511 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
9512 */
9513FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
9514{
9515 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9516 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
9517
9518 if (IEM_IS_MODRM_REG_MODE(bRm))
9519 {
9520 switch (pVCpu->iem.s.enmEffOpSize)
9521 {
9522 case IEMMODE_16BIT:
9523 IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_386, 0);
9524 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9525 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9526 IEM_MC_ARG(uint16_t, u16Src, 1);
9527 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9528 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9529
9530 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9531 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9532 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9533 IEM_MC_REF_EFLAGS(pEFlags);
9534 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9535
9536 IEM_MC_ADVANCE_RIP_AND_FINISH();
9537 IEM_MC_END();
9538 break;
9539
9540 case IEMMODE_32BIT:
9541 IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_386, 0);
9542 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9543 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9544 IEM_MC_ARG(uint32_t, u32Src, 1);
9545 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9546 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9547
9548 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9549 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9550 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9551 IEM_MC_REF_EFLAGS(pEFlags);
9552 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9553
9554 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
9555 IEM_MC_ADVANCE_RIP_AND_FINISH();
9556 IEM_MC_END();
9557 break;
9558
9559 case IEMMODE_64BIT:
9560 IEM_MC_BEGIN(4, 0, IEM_MC_F_64BIT, 0);
9561 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9562 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9563 IEM_MC_ARG(uint64_t, u64Src, 1);
9564 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9565 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9566
9567 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9568 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9569 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9570 IEM_MC_REF_EFLAGS(pEFlags);
9571 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9572
9573 IEM_MC_ADVANCE_RIP_AND_FINISH();
9574 IEM_MC_END();
9575 break;
9576
9577 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9578 }
9579 }
9580 else
9581 {
9582 switch (pVCpu->iem.s.enmEffOpSize)
9583 {
9584 case IEMMODE_16BIT:
9585 IEM_MC_BEGIN(4, 3, IEM_MC_F_MIN_386, 0);
9586 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9587 IEM_MC_ARG(uint16_t, u16Src, 1);
9588 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9589 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9590 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9591 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9592
9593 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9594 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9595 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9596 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9597 IEM_MC_FETCH_EFLAGS(EFlags);
9598 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9599 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9600
9601 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
9602 IEM_MC_COMMIT_EFLAGS(EFlags);
9603 IEM_MC_ADVANCE_RIP_AND_FINISH();
9604 IEM_MC_END();
9605 break;
9606
9607 case IEMMODE_32BIT:
9608 IEM_MC_BEGIN(4, 3, IEM_MC_F_MIN_386, 0);
9609 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9610 IEM_MC_ARG(uint32_t, u32Src, 1);
9611 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9612 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9613 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9614 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9615
9616 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9617 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9618 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9619 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9620 IEM_MC_FETCH_EFLAGS(EFlags);
9621 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9622 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9623
9624 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
9625 IEM_MC_COMMIT_EFLAGS(EFlags);
9626 IEM_MC_ADVANCE_RIP_AND_FINISH();
9627 IEM_MC_END();
9628 break;
9629
9630 case IEMMODE_64BIT:
9631 IEM_MC_BEGIN(4, 3, IEM_MC_F_64BIT, 0);
9632 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9633 IEM_MC_ARG(uint64_t, u64Src, 1);
9634 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9635 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9636 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9637 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9638
9639 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9640 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9641 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9642 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9643 IEM_MC_FETCH_EFLAGS(EFlags);
9644 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9645 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9646
9647 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
9648 IEM_MC_COMMIT_EFLAGS(EFlags);
9649 IEM_MC_ADVANCE_RIP_AND_FINISH();
9650 IEM_MC_END();
9651 break;
9652
9653 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9654 }
9655 }
9656}
9657
9658
9659
9660/** Opcode 0x0f 0xa4. */
9661FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
9662{
9663 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
9664 IEMOP_HLP_MIN_386();
9665 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9666}
9667
9668
9669/** Opcode 0x0f 0xa5. */
9670FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
9671{
9672 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
9673 IEMOP_HLP_MIN_386();
9674 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9675}
9676
9677
9678/** Opcode 0x0f 0xa8. */
9679FNIEMOP_DEF(iemOp_push_gs)
9680{
9681 IEMOP_MNEMONIC(push_gs, "push gs");
9682 IEMOP_HLP_MIN_386();
9683 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9684 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
9685}
9686
9687
9688/** Opcode 0x0f 0xa9. */
9689FNIEMOP_DEF(iemOp_pop_gs)
9690{
9691 IEMOP_MNEMONIC(pop_gs, "pop gs");
9692 IEMOP_HLP_MIN_386();
9693 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9694 IEM_MC_DEFER_TO_CIMPL_2_RET(0, iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
9695}
9696
9697
9698/** Opcode 0x0f 0xaa. */
9699FNIEMOP_DEF(iemOp_rsm)
9700{
9701 IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
9702 IEMOP_HLP_MIN_386(); /* 386SL and later. */
9703 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9704 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
9705 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
9706 iemCImpl_rsm);
9707}
9708
9709
9710
9711/** Opcode 0x0f 0xab. */
9712FNIEMOP_DEF(iemOp_bts_Ev_Gv)
9713{
9714 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
9715 IEMOP_HLP_MIN_386();
9716 IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_bts_u16, iemAImpl_bts_u32, iemAImpl_bts_u64);
9717 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
9718}
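/* Note: IEMOP_BODY_BIT_Ev_Gv_RW deliberately ends inside a dangling 'else'
   for the LOCK prefixed memory case; IEMOP_BODY_BIT_Ev_Gv_LOCKED supplies
   that branch and the closing braces, hence the paired invocations above. */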
9719
9720
9721/** Opcode 0x0f 0xac. */
9722FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
9723{
9724 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
9725 IEMOP_HLP_MIN_386();
9726 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9727}
9728
9729
9730/** Opcode 0x0f 0xad. */
9731FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
9732{
9733 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
9734 IEMOP_HLP_MIN_386();
9735 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9736}
9737
9738
9739/** Opcode 0x0f 0xae mem/0. */
9740FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
9741{
9742 IEMOP_MNEMONIC(fxsave, "fxsave m512");
9743 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9744 IEMOP_RAISE_INVALID_OPCODE_RET();
9745
9746 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_PENTIUM_II, 0);
9747 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9748 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9749 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9750 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9751 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9752 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/pVCpu->iem.s.enmEffOpSize, 2);
9753 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
9754 IEM_MC_END();
9755}
9756
9757
9758/** Opcode 0x0f 0xae mem/1. */
9759FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
9760{
9761 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
9762 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9763 IEMOP_RAISE_INVALID_OPCODE_RET();
9764
9765 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_PENTIUM_II, 0);
9766 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9767 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9768 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9769 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9770 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9771 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/pVCpu->iem.s.enmEffOpSize, 2);
9772 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9773 IEM_MC_END();
9774}
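/* Note: the 512 byte FXSAVE/FXRSTOR image, including its 16 byte alignment
   requirement, is handled by the iemCImpl_fxsave/iemCImpl_fxrstor workers;
   the effective operand size is passed along since REX.W selects the 64-bit
   FPU IP/DP image layout (FXSAVE64/FXRSTOR64). */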
9775
9776
9777/**
9778 * @opmaps grp15
9779 * @opcode !11/2
9780 * @oppfx none
9781 * @opcpuid sse
9782 * @opgroup og_sse_mxcsrsm
9783 * @opxcpttype 5
9784 * @optest op1=0 -> mxcsr=0
9785 * @optest op1=0x2083 -> mxcsr=0x2083
9786 * @optest op1=0xfffffffe -> value.xcpt=0xd
9787 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
9788 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
9789 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
9790 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
9791 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
9792 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9793 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9794 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9795 */
9796FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
9797{
9798 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9799 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9800 IEMOP_RAISE_INVALID_OPCODE_RET();
9801
9802 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_PENTIUM_II, 0);
9803 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9804 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9805 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9806 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
9807 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9808 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
9809 IEM_MC_END();
9810}
9811
9812
9813/**
9814 * @opmaps grp15
9815 * @opcode !11/3
9816 * @oppfx none
9817 * @opcpuid sse
9818 * @opgroup og_sse_mxcsrsm
9819 * @opxcpttype 5
9820 * @optest mxcsr=0 -> op1=0
9821 * @optest mxcsr=0x2083 -> op1=0x2083
9822 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
9823 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
9824 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
9825 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
9826 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
9827 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9828 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9829 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9830 */
9831FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
9832{
9833 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9834 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9835 IEMOP_RAISE_INVALID_OPCODE_RET();
9836
9837 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_PENTIUM_II, 0);
9838 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9839 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9840 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9841 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9842 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9843 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
9844 IEM_MC_END();
9845}
9846
9847
9848/**
9849 * @opmaps grp15
9850 * @opcode !11/4
9851 * @oppfx none
9852 * @opcpuid xsave
9853 * @opgroup og_system
9854 * @opxcpttype none
9855 */
9856FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
9857{
9858 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
9859 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9860 IEMOP_RAISE_INVALID_OPCODE_RET();
9861
9862 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_CORE, 0);
9863 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9864 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9865 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9866 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9867 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9868 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
9869 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
9870 IEM_MC_END();
9871}
9872
9873
9874/**
9875 * @opmaps grp15
9876 * @opcode !11/5
9877 * @oppfx none
9878 * @opcpuid xsave
9879 * @opgroup og_system
9880 * @opxcpttype none
9881 */
9882FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
9883{
9884 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
9885 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9886 IEMOP_RAISE_INVALID_OPCODE_RET();
9887
9888 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_CORE, 0);
9889 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9890 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9891 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9892 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9893 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9894 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
9895 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9896 IEM_MC_END();
9897}
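/* Note: the XSAVE/XRSTOR component bitmap in EDX:EAX as well as the 64 byte
   alignment requirement of the save area are dealt with by the
   iemCImpl_xsave/iemCImpl_xrstor workers, not here. */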
9898
9899/** Opcode 0x0f 0xae mem/6. */
9900FNIEMOP_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
9901
9902/**
9903 * @opmaps grp15
9904 * @opcode !11/7
9905 * @oppfx none
9906 * @opcpuid clfsh
9907 * @opgroup og_cachectl
9908 * @optest op1=1 ->
9909 */
9910FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
9911{
9912 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9913 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
9914 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9915
9916 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
9917 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9918 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9919 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9920 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9921 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9922 IEM_MC_END();
9923}
9924
9925/**
9926 * @opmaps grp15
9927 * @opcode !11/7
9928 * @oppfx 0x66
9929 * @opcpuid clflushopt
9930 * @opgroup og_cachectl
9931 * @optest op1=1 ->
9932 */
9933FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
9934{
9935 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9936 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
9937 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9938
9939 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
9940 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9941 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9942 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9943 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9944 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9945 IEM_MC_END();
9946}
9947
9948
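/*
 * The three fences below map to real fence instructions when the host has
 * them; on x86 hosts without SSE2 an alternative serializing memory
 * operation (iemAImpl_alt_mem_fence) is used instead, LFENCE/MFENCE/SFENCE
 * being SSE/SSE2 additions.
 */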
9949/** Opcode 0x0f 0xae 11b/5. */
9950FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
9951{
9952 RT_NOREF_PV(bRm);
9953 IEMOP_MNEMONIC(lfence, "lfence");
9954 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
9955 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
9956#ifdef RT_ARCH_ARM64
9957 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
9958#else
9959 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9960 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
9961 else
9962 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9963#endif
9964 IEM_MC_ADVANCE_RIP_AND_FINISH();
9965 IEM_MC_END();
9966}
9967
9968
9969/** Opcode 0x0f 0xae 11b/6. */
9970FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
9971{
9972 RT_NOREF_PV(bRm);
9973 IEMOP_MNEMONIC(mfence, "mfence");
9974 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
9975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
9976#ifdef RT_ARCH_ARM64
9977 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
9978#else
9979 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9980 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
9981 else
9982 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9983#endif
9984 IEM_MC_ADVANCE_RIP_AND_FINISH();
9985 IEM_MC_END();
9986}
9987
9988
9989/** Opcode 0x0f 0xae 11b/7. */
9990FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
9991{
9992 RT_NOREF_PV(bRm);
9993 IEMOP_MNEMONIC(sfence, "sfence");
9994 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
9995 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
9996#ifdef RT_ARCH_ARM64
9997 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
9998#else
9999 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
10000 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
10001 else
10002 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
10003#endif
10004 IEM_MC_ADVANCE_RIP_AND_FINISH();
10005 IEM_MC_END();
10006}
10007
10008
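/*
 * RDFSBASE/RDGSBASE and WRFSBASE/WRGSBASE (F3 0F AE /0../3): architecturally
 * these require 64-bit mode and CR4.FSGSBASE=1, which
 * IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT is expected to enforce; the 32-bit
 * operand size branches below merely handle the non-REX.W encodings.  The
 * write forms additionally raise #GP(0) for non-canonical 64-bit values.
 */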
10009/** Opcode 0xf3 0x0f 0xae 11b/0. */
10010FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
10011{
10012 IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
10013 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
10014 {
10015 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
10016 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10017 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10018 IEM_MC_LOCAL(uint64_t, u64Dst);
10019 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
10020 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
10021 IEM_MC_ADVANCE_RIP_AND_FINISH();
10022 IEM_MC_END();
10023 }
10024 else
10025 {
10026 IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
10027 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10028 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10029 IEM_MC_LOCAL(uint32_t, u32Dst);
10030 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
10031 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
10032 IEM_MC_ADVANCE_RIP_AND_FINISH();
10033 IEM_MC_END();
10034 }
10035}
10036
10037
10038/** Opcode 0xf3 0x0f 0xae 11b/1. */
10039FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
10040{
10041 IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
10042 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
10043 {
10044 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
10045 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10046 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10047 IEM_MC_LOCAL(uint64_t, u64Dst);
10048 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
10049 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
10050 IEM_MC_ADVANCE_RIP_AND_FINISH();
10051 IEM_MC_END();
10052 }
10053 else
10054 {
10055 IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
10056 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10057 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10058 IEM_MC_LOCAL(uint32_t, u32Dst);
10059 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
10060 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
10061 IEM_MC_ADVANCE_RIP_AND_FINISH();
10062 IEM_MC_END();
10063 }
10064}
10065
10066
10067/** Opcode 0xf3 0x0f 0xae 11b/2. */
10068FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
10069{
10070 IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
10071 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
10072 {
10073 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
10074 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10075 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10076 IEM_MC_LOCAL(uint64_t, u64Dst);
10077 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10078 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
10079 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
10080 IEM_MC_ADVANCE_RIP_AND_FINISH();
10081 IEM_MC_END();
10082 }
10083 else
10084 {
10085 IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
10086 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10087 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10088 IEM_MC_LOCAL(uint32_t, u32Dst);
10089 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10090 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
10091 IEM_MC_ADVANCE_RIP_AND_FINISH();
10092 IEM_MC_END();
10093 }
10094}
10095
10096
10097/** Opcode 0xf3 0x0f 0xae 11b/3. */
10098FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
10099{
10100 IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
10101 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
10102 {
10103 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
10104 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10105 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10106 IEM_MC_LOCAL(uint64_t, u64Dst);
10107 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10108 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
10109 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
10110 IEM_MC_ADVANCE_RIP_AND_FINISH();
10111 IEM_MC_END();
10112 }
10113 else
10114 {
10115 IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
10116 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10117 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10118 IEM_MC_LOCAL(uint32_t, u32Dst);
10119 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10120 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
10121 IEM_MC_ADVANCE_RIP_AND_FINISH();
10122 IEM_MC_END();
10123 }
10124}
10125
10126
10127/**
10128 * Group 15 jump table for register variant.
10129 */
10130IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
10131{ /* pfx: none, 066h, 0f3h, 0f2h */
10132 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
10133 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
10134 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
10135 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
10136 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
10137 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10138 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10139 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10140};
10141AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
10142
10143
10144/**
10145 * Group 15 jump table for memory variant.
10146 */
10147IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
10148{ /* pfx: none, 066h, 0f3h, 0f2h */
10149 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10150 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10151 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10152 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10153 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10154 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10155 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10156 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10157};
10158AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
10159
10160
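/*
 * Dispatch note: both tables are indexed by ModR/M reg * 4 + prefix index
 * (none/66h/F3h/F2h), so e.g. F3 0F AE /0 with a register operand resolves
 * to iemOp_Grp15_rdfsbase via g_apfnGroup15RegReg[0 * 4 + 2].
 */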
10161/** Opcode 0x0f 0xae. */
10162FNIEMOP_DEF(iemOp_Grp15)
10163{
10164 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
10165 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10166 if (IEM_IS_MODRM_REG_MODE(bRm))
10167 /* register, register */
10168 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10169 + pVCpu->iem.s.idxPrefix], bRm);
10170 /* memory, register */
10171 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10172 + pVCpu->iem.s.idxPrefix], bRm);
10173}
10174
10175
10176/** Opcode 0x0f 0xaf. */
10177FNIEMOP_DEF(iemOp_imul_Gv_Ev)
10178{
10179 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
10180 IEMOP_HLP_MIN_386();
10181 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10182 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_eflags);
10183 IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1, IEM_MC_F_MIN_386);
10184}
10185
10186
10187/** Opcode 0x0f 0xb0. */
10188FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
10189{
10190 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
10191 IEMOP_HLP_MIN_486();
10192 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10193
10194 if (IEM_IS_MODRM_REG_MODE(bRm))
10195 {
10196 IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_486, 0);
10197 IEMOP_HLP_DONE_DECODING();
10198 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10199 IEM_MC_ARG(uint8_t *, pu8Al, 1);
10200 IEM_MC_ARG(uint8_t, u8Src, 2);
10201 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10202
10203 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10204 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10205 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
10206 IEM_MC_REF_EFLAGS(pEFlags);
10207 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10208 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
10209 else
10210 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
10211
10212 IEM_MC_ADVANCE_RIP_AND_FINISH();
10213 IEM_MC_END();
10214 }
10215 else
10216 {
10217 IEM_MC_BEGIN(4, 4, IEM_MC_F_MIN_486, 0);
10218 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10219 IEM_MC_ARG(uint8_t *, pu8Al, 1);
10220 IEM_MC_ARG(uint8_t, u8Src, 2);
10221 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10222 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10223 IEM_MC_LOCAL(uint8_t, u8Al);
10224 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
10225
10226 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10227 IEMOP_HLP_DONE_DECODING();
10228 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10229 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10230 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
10231 IEM_MC_FETCH_EFLAGS(EFlags);
10232 IEM_MC_REF_LOCAL(pu8Al, u8Al);
10233 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10234 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
10235 else
10236 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
10237
10238 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
10239 IEM_MC_COMMIT_EFLAGS(EFlags);
10240 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
10241 IEM_MC_ADVANCE_RIP_AND_FINISH();
10242 IEM_MC_END();
10243 }
10244}
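/* Note: in the memory form above AL is shadowed in a local and only written
   back after the helper returns, so a failed compare leaves AL loaded with
   the value read from memory, matching real hardware behaviour. */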
10245
10246/** Opcode 0x0f 0xb1. */
10247FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
10248{
10249 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
10250 IEMOP_HLP_MIN_486();
10251 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10252
10253 if (IEM_IS_MODRM_REG_MODE(bRm))
10254 {
10255 switch (pVCpu->iem.s.enmEffOpSize)
10256 {
10257 case IEMMODE_16BIT:
10258 IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_486, 0);
10259 IEMOP_HLP_DONE_DECODING();
10260 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10261 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
10262 IEM_MC_ARG(uint16_t, u16Src, 2);
10263 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10264
10265 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10266 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10267 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
10268 IEM_MC_REF_EFLAGS(pEFlags);
10269 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10270 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
10271 else
10272 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
10273
10274 IEM_MC_ADVANCE_RIP_AND_FINISH();
10275 IEM_MC_END();
10276 break;
10277
10278 case IEMMODE_32BIT:
10279 IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_486, 0);
10280 IEMOP_HLP_DONE_DECODING();
10281 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10282 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
10283 IEM_MC_ARG(uint32_t, u32Src, 2);
10284 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10285
10286 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10287 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10288 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
10289 IEM_MC_REF_EFLAGS(pEFlags);
10290 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10291 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
10292 else
10293 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
10294
10295 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
10296 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
10297 } IEM_MC_ELSE() {
10298 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX);
10299 } IEM_MC_ENDIF();
10300
10301 IEM_MC_ADVANCE_RIP_AND_FINISH();
10302 IEM_MC_END();
10303 break;
10304
10305 case IEMMODE_64BIT:
10306 IEM_MC_BEGIN(4, 0, IEM_MC_F_64BIT, 0);
10307 IEMOP_HLP_DONE_DECODING();
10308 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10309 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
10310#ifdef RT_ARCH_X86
10311 IEM_MC_ARG(uint64_t *, pu64Src, 2);
10312#else
10313 IEM_MC_ARG(uint64_t, u64Src, 2);
10314#endif
10315 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10316
10317 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10318 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
10319 IEM_MC_REF_EFLAGS(pEFlags);
10320#ifdef RT_ARCH_X86
10321 IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10322 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10323 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
10324 else
10325 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
10326#else
10327 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10328 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10329 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
10330 else
10331 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
10332#endif
10333
10334 IEM_MC_ADVANCE_RIP_AND_FINISH();
10335 IEM_MC_END();
10336 break;
10337
10338 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10339 }
10340 }
10341 else
10342 {
10343 switch (pVCpu->iem.s.enmEffOpSize)
10344 {
10345 case IEMMODE_16BIT:
10346 IEM_MC_BEGIN(4, 4, IEM_MC_F_MIN_486, 0);
10347 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10348 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
10349 IEM_MC_ARG(uint16_t, u16Src, 2);
10350 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10351 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10352 IEM_MC_LOCAL(uint16_t, u16Ax);
10353 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
10354
10355 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10356 IEMOP_HLP_DONE_DECODING();
10357 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10358 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10359 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
10360 IEM_MC_FETCH_EFLAGS(EFlags);
10361 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
10362 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10363 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
10364 else
10365 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
10366
10367 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
10368 IEM_MC_COMMIT_EFLAGS(EFlags);
10369 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
10370 IEM_MC_ADVANCE_RIP_AND_FINISH();
10371 IEM_MC_END();
10372 break;
10373
10374 case IEMMODE_32BIT:
10375 IEM_MC_BEGIN(4, 4, IEM_MC_F_MIN_486, 0);
10376 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10377 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
10378 IEM_MC_ARG(uint32_t, u32Src, 2);
10379 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10380 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10381 IEM_MC_LOCAL(uint32_t, u32Eax);
10382 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
10383
10384 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10385 IEMOP_HLP_DONE_DECODING();
10386 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10387 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10388 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
10389 IEM_MC_FETCH_EFLAGS(EFlags);
10390 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
10391 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10392 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
10393 else
10394 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
10395
10396 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
10397 IEM_MC_COMMIT_EFLAGS(EFlags);
10398
10399 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
10400 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
10401 } IEM_MC_ENDIF();
10402
10403 IEM_MC_ADVANCE_RIP_AND_FINISH();
10404 IEM_MC_END();
10405 break;
10406
10407 case IEMMODE_64BIT:
10408 IEM_MC_BEGIN(4, 4, IEM_MC_F_64BIT, 0);
10409 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10410 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
10411#ifdef RT_ARCH_X86
10412 IEM_MC_ARG(uint64_t *, pu64Src, 2);
10413#else
10414 IEM_MC_ARG(uint64_t, u64Src, 2);
10415#endif
10416 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10417 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10418 IEM_MC_LOCAL(uint64_t, u64Rax);
10419 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
10420
10421 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10422 IEMOP_HLP_DONE_DECODING();
10423 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10424 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
10425 IEM_MC_FETCH_EFLAGS(EFlags);
10426 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
10427#ifdef RT_ARCH_X86
10428 IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10429 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10430 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
10431 else
10432 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
10433#else
10434 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10435 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10436 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
10437 else
10438 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
10439#endif
10440
10441 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
10442 IEM_MC_COMMIT_EFLAGS(EFlags);
10443 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
10444 IEM_MC_ADVANCE_RIP_AND_FINISH();
10445 IEM_MC_END();
10446 break;
10447
10448 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10449 }
10450 }
10451}
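
/*
 * Note the 32-bit register form above: in 64-bit mode only the register that
 * actually gets written has its high dword cleared, roughly (a sketch, not
 * the emitted code):
 *
 *      if (ZF set)  uDstReg = (uint64_t)(uint32_t)uDstReg;  // dest updated
 *      else         rax     = (uint64_t)(uint32_t)rax;      // EAX reloaded
 */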
10452
10453
10454/** Opcode 0x0f 0xb2. */
10455FNIEMOP_DEF(iemOp_lss_Gv_Mp)
10456{
10457 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
10458 IEMOP_HLP_MIN_386();
10459 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10460 if (IEM_IS_MODRM_REG_MODE(bRm))
10461 IEMOP_RAISE_INVALID_OPCODE_RET();
10462 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
10463}
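
/*
 * The Mp operand is a far pointer in memory with the offset first and the
 * selector word last, e.g. for the 32-bit operand size (illustrative only,
 * pbMem standing in for the mapped guest memory):
 *
 *      uOffset   = *(uint32_t const *)pbMem;        // -> general register
 *      uSelector = *(uint16_t const *)(pbMem + 4);  // -> SS (FS/GS below)
 */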
10464
10465
10466/** Opcode 0x0f 0xb3. */
10467FNIEMOP_DEF(iemOp_btr_Ev_Gv)
10468{
10469 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
10470 IEMOP_HLP_MIN_386();
10471 IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_btr_u16, iemAImpl_btr_u32, iemAImpl_btr_u64);
10472 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_btr_u16_locked, iemAImpl_btr_u32_locked, iemAImpl_btr_u64_locked);
10473}
10474
10475
10476/** Opcode 0x0f 0xb4. */
10477FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
10478{
10479 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
10480 IEMOP_HLP_MIN_386();
10481 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10482 if (IEM_IS_MODRM_REG_MODE(bRm))
10483 IEMOP_RAISE_INVALID_OPCODE_RET();
10484 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
10485}
10486
10487
10488/** Opcode 0x0f 0xb5. */
10489FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
10490{
10491 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
10492 IEMOP_HLP_MIN_386();
10493 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10494 if (IEM_IS_MODRM_REG_MODE(bRm))
10495 IEMOP_RAISE_INVALID_OPCODE_RET();
10496 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
10497}
10498
10499
10500/** Opcode 0x0f 0xb6. */
10501FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
10502{
10503 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
10504 IEMOP_HLP_MIN_386();
10505
10506 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10507
10508 /*
10509 * If rm is denoting a register, no more instruction bytes.
10510 */
10511 if (IEM_IS_MODRM_REG_MODE(bRm))
10512 {
10513 switch (pVCpu->iem.s.enmEffOpSize)
10514 {
10515 case IEMMODE_16BIT:
10516 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
10517 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10518 IEM_MC_LOCAL(uint16_t, u16Value);
10519 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10520 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10521 IEM_MC_ADVANCE_RIP_AND_FINISH();
10522 IEM_MC_END();
10523 break;
10524
10525 case IEMMODE_32BIT:
10526 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
10527 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10528 IEM_MC_LOCAL(uint32_t, u32Value);
10529 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10530 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10531 IEM_MC_ADVANCE_RIP_AND_FINISH();
10532 IEM_MC_END();
10533 break;
10534
10535 case IEMMODE_64BIT:
10536 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
10537 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10538 IEM_MC_LOCAL(uint64_t, u64Value);
10539 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10540 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10541 IEM_MC_ADVANCE_RIP_AND_FINISH();
10542 IEM_MC_END();
10543 break;
10544
10545 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10546 }
10547 }
10548 else
10549 {
10550 /*
10551 * We're loading a register from memory.
10552 */
10553 switch (pVCpu->iem.s.enmEffOpSize)
10554 {
10555 case IEMMODE_16BIT:
10556 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
10557 IEM_MC_LOCAL(uint16_t, u16Value);
10558 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10559 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10560 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10561 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10562 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10563 IEM_MC_ADVANCE_RIP_AND_FINISH();
10564 IEM_MC_END();
10565 break;
10566
10567 case IEMMODE_32BIT:
10568 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
10569 IEM_MC_LOCAL(uint32_t, u32Value);
10570 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10571 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10572 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10573 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10574 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10575 IEM_MC_ADVANCE_RIP_AND_FINISH();
10576 IEM_MC_END();
10577 break;
10578
10579 case IEMMODE_64BIT:
10580 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
10581 IEM_MC_LOCAL(uint64_t, u64Value);
10582 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10583 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10584 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10585 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10586 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10587 IEM_MC_ADVANCE_RIP_AND_FINISH();
10588 IEM_MC_END();
10589 break;
10590
10591 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10592 }
10593 }
10594}
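
/*
 * MOVZX simply zero extends the narrower source, e.g. for the 32-bit form
 * (sketch):
 *
 *      uint32_t const u32Result = (uint8_t)uSrc;   // bits 31:8 become zero
 *
 * and in 64-bit mode the 32-bit store clears bits 63:32 as usual.
 */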
10595
10596
10597/** Opcode 0x0f 0xb7. */
10598FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
10599{
10600 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
10601 IEMOP_HLP_MIN_386();
10602
10603 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10604
10605 /** @todo Not entirely sure how the operand size prefix is handled here,
10606 * assuming that it will be ignored. Would be nice to have a few
10607 * tests for this. */
10608
10609 /** @todo There should be no difference in the behaviour whether REX.W is
10610 * present or not... */
10611
10612 /*
10613 * If rm is denoting a register, no more instruction bytes.
10614 */
10615 if (IEM_IS_MODRM_REG_MODE(bRm))
10616 {
10617 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10618 {
10619 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
10620 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10621 IEM_MC_LOCAL(uint32_t, u32Value);
10622 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10623 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10624 IEM_MC_ADVANCE_RIP_AND_FINISH();
10625 IEM_MC_END();
10626 }
10627 else
10628 {
10629 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
10630 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10631 IEM_MC_LOCAL(uint64_t, u64Value);
10632 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10633 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10634 IEM_MC_ADVANCE_RIP_AND_FINISH();
10635 IEM_MC_END();
10636 }
10637 }
10638 else
10639 {
10640 /*
10641 * We're loading a register from memory.
10642 */
10643 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10644 {
10645 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
10646 IEM_MC_LOCAL(uint32_t, u32Value);
10647 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10648 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10649 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10650 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10651 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10652 IEM_MC_ADVANCE_RIP_AND_FINISH();
10653 IEM_MC_END();
10654 }
10655 else
10656 {
10657 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
10658 IEM_MC_LOCAL(uint64_t, u64Value);
10659 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10660 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10661 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10662 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10663 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10664 IEM_MC_ADVANCE_RIP_AND_FINISH();
10665 IEM_MC_END();
10666 }
10667 }
10668}
10669
10670
10671/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
10672FNIEMOP_UD_STUB(iemOp_jmpe);
10673
10674
10675/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
10676FNIEMOP_DEF(iemOp_popcnt_Gv_Ev)
10677{
10678 IEMOP_MNEMONIC2(RM, POPCNT, popcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
10679 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fPopCnt)
10680 return iemOp_InvalidNeedRM(pVCpu);
10681#ifndef TST_IEM_CHECK_MC
10682# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
10683 static const IEMOPBINSIZES s_Native =
10684 { NULL, NULL, iemAImpl_popcnt_u16, NULL, iemAImpl_popcnt_u32, NULL, iemAImpl_popcnt_u64, NULL };
10685# endif
10686 static const IEMOPBINSIZES s_Fallback =
10687 { NULL, NULL, iemAImpl_popcnt_u16_fallback, NULL, iemAImpl_popcnt_u32_fallback, NULL, iemAImpl_popcnt_u64_fallback, NULL };
10688#endif
10689 const IEMOPBINSIZES * const pImpl = IEM_SELECT_HOST_OR_FALLBACK(fPopCnt, &s_Native, &s_Fallback);
10690 IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1, IEM_MC_F_NOT_286_OR_OLDER);
10691}
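
/*
 * The portable fallback boils down to a plain population count, i.e. what
 * iemAImpl_popcnt_uXX_fallback computes (sketch, not its actual code):
 *
 *      unsigned cBits = 0;
 *      while (uSrc)
 *      {
 *          cBits += uSrc & 1;
 *          uSrc >>= 1;
 *      }
 *
 * ZF is set when the source is zero; OF, SF, AF, PF and CF are cleared.
 */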
10692
10693
10694/**
10695 * @opcode 0xb9
10696 * @opinvalid intel-modrm
10697 * @optest ->
10698 */
10699FNIEMOP_DEF(iemOp_Grp10)
10700{
10701 /*
10702 * AMD does not decode beyond the 0xb9 opcode, whereas Intel decodes the modr/m
10703 * byte too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
10704 */
10705 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
10706 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
10707 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
10708}
10709
10710
10711/**
10712 * Body for group 8 bit instruction.
10713 */
10714#define IEMOP_BODY_BIT_Ev_Ib_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
10715 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
10716 \
10717 if (IEM_IS_MODRM_REG_MODE(bRm)) \
10718 { \
10719 /* register destination. */ \
10720 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10721 \
10722 switch (pVCpu->iem.s.enmEffOpSize) \
10723 { \
10724 case IEMMODE_16BIT: \
10725 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
10726 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10727 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
10728 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); \
10729 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10730 \
10731 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10732 IEM_MC_REF_EFLAGS(pEFlags); \
10733 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
10734 \
10735 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10736 IEM_MC_END(); \
10737 break; \
10738 \
10739 case IEMMODE_32BIT: \
10740 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
10741 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10742 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
10743 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); \
10744 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10745 \
10746 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10747 IEM_MC_REF_EFLAGS(pEFlags); \
10748 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
10749 \
10750 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
10751 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10752 IEM_MC_END(); \
10753 break; \
10754 \
10755 case IEMMODE_64BIT: \
10756 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
10757 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10758 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
10759 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); \
10760 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10761 \
10762 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10763 IEM_MC_REF_EFLAGS(pEFlags); \
10764 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
10765 \
10766 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10767 IEM_MC_END(); \
10768 break; \
10769 \
10770 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10771 } \
10772 } \
10773 else \
10774 { \
10775 /* memory destination. */ \
10776 /** @todo test negative bit offsets! */ \
10777 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
10778 { \
10779 switch (pVCpu->iem.s.enmEffOpSize) \
10780 { \
10781 case IEMMODE_16BIT: \
10782 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
10783 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10784 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10785 \
10786 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10787 IEMOP_HLP_DONE_DECODING(); \
10788 \
10789 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10790 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
10791 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10792 \
10793 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); \
10794 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10795 IEM_MC_FETCH_EFLAGS(EFlags); \
10796 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
10797 \
10798 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
10799 IEM_MC_COMMIT_EFLAGS(EFlags); \
10800 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10801 IEM_MC_END(); \
10802 break; \
10803 \
10804 case IEMMODE_32BIT: \
10805 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
10806 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10807 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10808 \
10809 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10810 IEMOP_HLP_DONE_DECODING(); \
10811 \
10812 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10813 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
10814 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10815 \
10816 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); \
10817 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10818 IEM_MC_FETCH_EFLAGS(EFlags); \
10819 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
10820 \
10821 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
10822 IEM_MC_COMMIT_EFLAGS(EFlags); \
10823 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10824 IEM_MC_END(); \
10825 break; \
10826 \
10827 case IEMMODE_64BIT: \
10828 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
10829 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10830 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10831 \
10832 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10833 IEMOP_HLP_DONE_DECODING(); \
10834 \
10835 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10836 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
10837 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10838 \
10839 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); \
10840 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10841 IEM_MC_FETCH_EFLAGS(EFlags); \
10842 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
10843 \
10844 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
10845 IEM_MC_COMMIT_EFLAGS(EFlags); \
10846 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10847 IEM_MC_END(); \
10848 break; \
10849 \
10850 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10851 } \
10852 } \
10853 else \
10854 { \
10855 (void)0
10856/* Separate macro to work around parsing issue in IEMAllInstPython.py */
10857#define IEMOP_BODY_BIT_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
10858 switch (pVCpu->iem.s.enmEffOpSize) \
10859 { \
10860 case IEMMODE_16BIT: \
10861 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
10862 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10863 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10864 \
10865 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10866 IEMOP_HLP_DONE_DECODING(); \
10867 \
10868 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
10869 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10870 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10871 \
10872 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); \
10873 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10874 IEM_MC_FETCH_EFLAGS(EFlags); \
10875 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
10876 \
10877 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
10878 IEM_MC_COMMIT_EFLAGS(EFlags); \
10879 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10880 IEM_MC_END(); \
10881 break; \
10882 \
10883 case IEMMODE_32BIT: \
10884 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
10885 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10886 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10887 \
10888 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10889 IEMOP_HLP_DONE_DECODING(); \
10890 \
10891 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10892 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
10893 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10894 \
10895 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); \
10896 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10897 IEM_MC_FETCH_EFLAGS(EFlags); \
10898 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
10899 \
10900 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
10901 IEM_MC_COMMIT_EFLAGS(EFlags); \
10902 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10903 IEM_MC_END(); \
10904 break; \
10905 \
10906 case IEMMODE_64BIT: \
10907 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
10908 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10909 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10910 \
10911 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10912 IEMOP_HLP_DONE_DECODING(); \
10913 \
10914 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10915 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
10916 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10917 \
10918 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); \
10919 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10920 IEM_MC_FETCH_EFLAGS(EFlags); \
10921 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
10922 \
10923 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
10924 IEM_MC_COMMIT_EFLAGS(EFlags); \
10925 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10926 IEM_MC_END(); \
10927 break; \
10928 \
10929 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10930 } \
10931 } \
10932 } \
10933 (void)0
10934
10935/* Read-only version (bt) */
10936#define IEMOP_BODY_BIT_Ev_Ib_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
10937 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
10938 \
10939 if (IEM_IS_MODRM_REG_MODE(bRm)) \
10940 { \
10941 /* register destination. */ \
10942 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10943 \
10944 switch (pVCpu->iem.s.enmEffOpSize) \
10945 { \
10946 case IEMMODE_16BIT: \
10947 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
10948 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10949 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
10950 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); \
10951 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10952 \
10953 IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10954 IEM_MC_REF_EFLAGS(pEFlags); \
10955 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
10956 \
10957 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10958 IEM_MC_END(); \
10959 break; \
10960 \
10961 case IEMMODE_32BIT: \
10962 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
10963 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10964 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
10965 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); \
10966 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10967 \
10968 IEM_MC_REF_GREG_U32_CONST(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10969 IEM_MC_REF_EFLAGS(pEFlags); \
10970 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
10971 \
10972 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10973 IEM_MC_END(); \
10974 break; \
10975 \
10976 case IEMMODE_64BIT: \
10977 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
10978 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10979 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
10980 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); \
10981 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10982 \
10983 IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10984 IEM_MC_REF_EFLAGS(pEFlags); \
10985 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
10986 \
10987 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10988 IEM_MC_END(); \
10989 break; \
10990 \
10991 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10992 } \
10993 } \
10994 else \
10995 { \
10996 /* memory destination. */ \
10997 /** @todo test negative bit offsets! */ \
10998 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
10999 { \
11000 switch (pVCpu->iem.s.enmEffOpSize) \
11001 { \
11002 case IEMMODE_16BIT: \
11003 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
11004 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11005 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
11006 \
11007 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
11008 IEMOP_HLP_DONE_DECODING(); \
11009 \
11010 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11011 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
11012 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11013 \
11014 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); \
11015 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11016 IEM_MC_FETCH_EFLAGS(EFlags); \
11017 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
11018 \
11019 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
11020 IEM_MC_COMMIT_EFLAGS(EFlags); \
11021 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11022 IEM_MC_END(); \
11023 break; \
11024 \
11025 case IEMMODE_32BIT: \
11026 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
11027 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11028 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
11029 \
11030 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
11031 IEMOP_HLP_DONE_DECODING(); \
11032 \
11033 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11034 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
11035 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11036 \
11037 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); \
11038 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11039 IEM_MC_FETCH_EFLAGS(EFlags); \
11040 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
11041 \
11042 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
11043 IEM_MC_COMMIT_EFLAGS(EFlags); \
11044 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11045 IEM_MC_END(); \
11046 break; \
11047 \
11048 case IEMMODE_64BIT: \
11049 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
11050 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11051 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
11052 \
11053 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
11054 IEMOP_HLP_DONE_DECODING(); \
11055 \
11056 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11057 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
11058 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11059 \
11060 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); \
11061 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11062 IEM_MC_FETCH_EFLAGS(EFlags); \
11063 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
11064 \
11065 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
11066 IEM_MC_COMMIT_EFLAGS(EFlags); \
11067 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11068 IEM_MC_END(); \
11069 break; \
11070 \
11071 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11072 } \
11073 } \
11074 else \
11075 { \
11076 IEMOP_HLP_DONE_DECODING(); \
11077 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
11078 } \
11079 } \
11080 (void)0
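
/*
 * For all the immediate forms the bit offset wraps within the operand, which
 * is what the 0x0f/0x1f/0x3f masks above implement; e.g. for BT (sketch):
 *
 *      CF = (uDst >> (bImm & 15)) & 1;     // 16-bit operand
 *      CF = (uDst >> (bImm & 31)) & 1;     // 32-bit operand
 *      CF = (uDst >> (bImm & 63)) & 1;     // 64-bit operand
 */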
11081
11082
11083/** Opcode 0x0f 0xba /4. */
11084FNIEMOPRM_DEF(iemOp_Grp8_bt_Ev_Ib)
11085{
11086 IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib");
11087 IEMOP_BODY_BIT_Ev_Ib_RO(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64);
11088}
11089
11090
11091/** Opcode 0x0f 0xba /5. */
11092FNIEMOPRM_DEF(iemOp_Grp8_bts_Ev_Ib)
11093{
11094 IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib");
11095 IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_bts_u16, iemAImpl_bts_u32, iemAImpl_bts_u64);
11096 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
11097}
11098
11099
11100/** Opcode 0x0f 0xba /6. */
11101FNIEMOPRM_DEF(iemOp_Grp8_btr_Ev_Ib)
11102{
11103 IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib");
11104 IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_btr_u16, iemAImpl_btr_u32, iemAImpl_btr_u64);
11105 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_btr_u16_locked, iemAImpl_btr_u32_locked, iemAImpl_btr_u64_locked);
11106}
11107
11108
11109/** Opcode 0x0f 0xba /7. */
11110FNIEMOPRM_DEF(iemOp_Grp8_btc_Ev_Ib)
11111{
11112 IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib");
11113 IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_btc_u16, iemAImpl_btc_u32, iemAImpl_btc_u64);
11114 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_btc_u16_locked, iemAImpl_btc_u32_locked, iemAImpl_btc_u64_locked);
11115}
11116
11117
11118/** Opcode 0x0f 0xba. */
11119FNIEMOP_DEF(iemOp_Grp8)
11120{
11121 IEMOP_HLP_MIN_386();
11122 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11123 switch (IEM_GET_MODRM_REG_8(bRm))
11124 {
11125 case 4: return FNIEMOP_CALL_1(iemOp_Grp8_bt_Ev_Ib, bRm);
11126 case 5: return FNIEMOP_CALL_1(iemOp_Grp8_bts_Ev_Ib, bRm);
11127 case 6: return FNIEMOP_CALL_1(iemOp_Grp8_btr_Ev_Ib, bRm);
11128 case 7: return FNIEMOP_CALL_1(iemOp_Grp8_btc_Ev_Ib, bRm);
11129
11130 case 0: case 1: case 2: case 3:
11131 /* Both AMD and Intel want full modr/m decoding and imm8. */
11132 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
11133
11134 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11135 }
11136}
11137
11138
11139/** Opcode 0x0f 0xbb. */
11140FNIEMOP_DEF(iemOp_btc_Ev_Gv)
11141{
11142 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
11143 IEMOP_HLP_MIN_386();
11144 IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_btc_u16, iemAImpl_btc_u32, iemAImpl_btc_u64);
11145 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_btc_u16_locked, iemAImpl_btc_u32_locked, iemAImpl_btc_u64_locked);
11146}
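
/*
 * Unlike the immediate forms, the Ev,Gv bit instructions with a memory
 * operand treat the register bit offset as a signed index into a bit string,
 * conceptually (a sketch for the 16-bit case, assuming floor division):
 *
 *      GCPtrBit = GCPtrEffDst + 2 * ((int16_t)u16Src >> 4);
 *      iBit     = u16Src & 15;
 *
 * which is why negative bit offsets are interesting to test.
 */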
11147
11148
11149/**
11150 * Common worker for BSF and BSR instructions.
11151 *
11152 * These cannot use iemOpHlpBinaryOperator_rv_rm because they don't always write
11153 * the destination register, which means that for 32-bit operations the high
11154 * bits must be left alone.
11155 *
11156 * @param pImpl Pointer to the instruction implementation (assembly).
11157 */
11158FNIEMOP_DEF_1(iemOpHlpBitScanOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
11159{
11160 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11161
11162 /*
11163 * If rm is denoting a register, no more instruction bytes.
11164 */
11165 if (IEM_IS_MODRM_REG_MODE(bRm))
11166 {
11167 switch (pVCpu->iem.s.enmEffOpSize)
11168 {
11169 case IEMMODE_16BIT:
11170 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
11171 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11172 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11173 IEM_MC_ARG(uint16_t, u16Src, 1);
11174 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11175
11176 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11177 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11178 IEM_MC_REF_EFLAGS(pEFlags);
11179 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
11180
11181 IEM_MC_ADVANCE_RIP_AND_FINISH();
11182 IEM_MC_END();
11183 break;
11184
11185 case IEMMODE_32BIT:
11186 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
11187 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11188 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11189 IEM_MC_ARG(uint32_t, u32Src, 1);
11190 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11191
11192 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11193 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11194 IEM_MC_REF_EFLAGS(pEFlags);
11195 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
11196 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
11197 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
11198 } IEM_MC_ENDIF();
11199 IEM_MC_ADVANCE_RIP_AND_FINISH();
11200 IEM_MC_END();
11201 break;
11202
11203 case IEMMODE_64BIT:
11204 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
11205 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11206 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11207 IEM_MC_ARG(uint64_t, u64Src, 1);
11208 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11209
11210 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11211 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11212 IEM_MC_REF_EFLAGS(pEFlags);
11213 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
11214
11215 IEM_MC_ADVANCE_RIP_AND_FINISH();
11216 IEM_MC_END();
11217 break;
11218
11219 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11220 }
11221 }
11222 else
11223 {
11224 /*
11225 * We're accessing memory.
11226 */
11227 switch (pVCpu->iem.s.enmEffOpSize)
11228 {
11229 case IEMMODE_16BIT:
11230 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
11231 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11232 IEM_MC_ARG(uint16_t, u16Src, 1);
11233 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11234 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11235
11236 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11237 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11238 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11239 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11240 IEM_MC_REF_EFLAGS(pEFlags);
11241 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
11242
11243 IEM_MC_ADVANCE_RIP_AND_FINISH();
11244 IEM_MC_END();
11245 break;
11246
11247 case IEMMODE_32BIT:
11248 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
11249 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11250 IEM_MC_ARG(uint32_t, u32Src, 1);
11251 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11252 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11253
11254 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11255 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11256 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11257 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11258 IEM_MC_REF_EFLAGS(pEFlags);
11259 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
11260
11261 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
11262 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
11263 } IEM_MC_ENDIF();
11264 IEM_MC_ADVANCE_RIP_AND_FINISH();
11265 IEM_MC_END();
11266 break;
11267
11268 case IEMMODE_64BIT:
11269 IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
11270 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11271 IEM_MC_ARG(uint64_t, u64Src, 1);
11272 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11273 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11274
11275 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11276 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11277 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11278 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11279 IEM_MC_REF_EFLAGS(pEFlags);
11280 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
11281
11282 IEM_MC_ADVANCE_RIP_AND_FINISH();
11283 IEM_MC_END();
11284 break;
11285
11286 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11287 }
11288 }
11289}
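
/*
 * What the bsf/bsr workers compute, roughly (sketch; the real helpers also
 * implement the vendor specific flag behaviour selected by the callers):
 *
 *      if (uSrc == 0)
 *          ZF = 1;                 // destination register left untouched
 *      else
 *      {
 *          ZF = 0;
 *          *puDst = index of the least (bsf) / most (bsr) significant set bit;
 *      }
 */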
11290
11291
11292/** Opcode 0x0f 0xbc. */
11293FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
11294{
11295 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
11296 IEMOP_HLP_MIN_386();
11297 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
11298 return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsf_eflags));
11299}
11300
11301
11302/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
11303FNIEMOP_DEF(iemOp_tzcnt_Gv_Ev)
11304{
11305 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
11306 return FNIEMOP_CALL(iemOp_bsf_Gv_Ev);
11307 IEMOP_MNEMONIC2(RM, TZCNT, tzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
11308
11309#ifndef TST_IEM_CHECK_MC
11310 static const IEMOPBINSIZES s_iemAImpl_tzcnt =
11311 { NULL, NULL, iemAImpl_tzcnt_u16, NULL, iemAImpl_tzcnt_u32, NULL, iemAImpl_tzcnt_u64, NULL };
11312 static const IEMOPBINSIZES s_iemAImpl_tzcnt_amd =
11313 { NULL, NULL, iemAImpl_tzcnt_u16_amd, NULL, iemAImpl_tzcnt_u32_amd, NULL, iemAImpl_tzcnt_u64_amd, NULL };
11314 static const IEMOPBINSIZES s_iemAImpl_tzcnt_intel =
11315 { NULL, NULL, iemAImpl_tzcnt_u16_intel, NULL, iemAImpl_tzcnt_u32_intel, NULL, iemAImpl_tzcnt_u64_intel, NULL };
11316 static const IEMOPBINSIZES * const s_iemAImpl_tzcnt_eflags[2][4] =
11317 {
11318 { &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt_intel },
11319 { &s_iemAImpl_tzcnt, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt }
11320 };
11321#endif
11322 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
11323 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_tzcnt_eflags,
11324 IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1);
11325 IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1, IEM_MC_F_NOT_286_OR_OLDER);
11326}
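
/*
 * TZCNT differs from BSF mainly in how zero input is handled (sketch):
 *
 *      *puDst = uSrc ? index of least significant set bit : operand width;
 *      CF     = (uSrc == 0);
 *      ZF     = (*puDst == 0);
 */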
11327
11328
11329/** Opcode 0x0f 0xbd. */
11330FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
11331{
11332 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
11333 IEMOP_HLP_MIN_386();
11334 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
11335 return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsr_eflags));
11336}
11337
11338
11339/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
11340FNIEMOP_DEF(iemOp_lzcnt_Gv_Ev)
11341{
11342 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
11343 return FNIEMOP_CALL(iemOp_bsr_Gv_Ev);
11344 IEMOP_MNEMONIC2(RM, LZCNT, lzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
11345
11346#ifndef TST_IEM_CHECK_MC
11347 static const IEMOPBINSIZES s_iemAImpl_lzcnt =
11348 { NULL, NULL, iemAImpl_lzcnt_u16, NULL, iemAImpl_lzcnt_u32, NULL, iemAImpl_lzcnt_u64, NULL };
11349 static const IEMOPBINSIZES s_iemAImpl_lzcnt_amd =
11350 { NULL, NULL, iemAImpl_lzcnt_u16_amd, NULL, iemAImpl_lzcnt_u32_amd, NULL, iemAImpl_lzcnt_u64_amd, NULL };
11351 static const IEMOPBINSIZES s_iemAImpl_lzcnt_intel =
11352 { NULL, NULL, iemAImpl_lzcnt_u16_intel, NULL, iemAImpl_lzcnt_u32_intel, NULL, iemAImpl_lzcnt_u64_intel, NULL };
11353 static const IEMOPBINSIZES * const s_iemAImpl_lzcnt_eflags[2][4] =
11354 {
11355 { &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt_intel },
11356 { &s_iemAImpl_lzcnt, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt }
11357 };
11358#endif
11359 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
11360 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_lzcnt_eflags,
11361 IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1);
11362 IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1, IEM_MC_F_NOT_286_OR_OLDER);
11363}
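
/*
 * Likewise LZCNT versus BSR (sketch):
 *
 *      *puDst = uSrc ? number of leading zero bits : operand width;
 *      CF     = (uSrc == 0);
 *      ZF     = (*puDst == 0);
 */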
11364
11365
11366
11367/** Opcode 0x0f 0xbe. */
11368FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
11369{
11370 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
11371 IEMOP_HLP_MIN_386();
11372
11373 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11374
11375 /*
11376 * If rm is denoting a register, no more instruction bytes.
11377 */
11378 if (IEM_IS_MODRM_REG_MODE(bRm))
11379 {
11380 switch (pVCpu->iem.s.enmEffOpSize)
11381 {
11382 case IEMMODE_16BIT:
11383 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
11384 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11385 IEM_MC_LOCAL(uint16_t, u16Value);
11386 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11387 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
11388 IEM_MC_ADVANCE_RIP_AND_FINISH();
11389 IEM_MC_END();
11390 break;
11391
11392 case IEMMODE_32BIT:
11393 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
11394 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11395 IEM_MC_LOCAL(uint32_t, u32Value);
11396 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11397 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11398 IEM_MC_ADVANCE_RIP_AND_FINISH();
11399 IEM_MC_END();
11400 break;
11401
11402 case IEMMODE_64BIT:
11403 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
11404 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11405 IEM_MC_LOCAL(uint64_t, u64Value);
11406 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11407 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11408 IEM_MC_ADVANCE_RIP_AND_FINISH();
11409 IEM_MC_END();
11410 break;
11411
11412 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11413 }
11414 }
11415 else
11416 {
11417 /*
11418 * We're loading a register from memory.
11419 */
11420 switch (pVCpu->iem.s.enmEffOpSize)
11421 {
11422 case IEMMODE_16BIT:
11423 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
11424 IEM_MC_LOCAL(uint16_t, u16Value);
11425 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11426 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11427 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11428 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11429 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
11430 IEM_MC_ADVANCE_RIP_AND_FINISH();
11431 IEM_MC_END();
11432 break;
11433
11434 case IEMMODE_32BIT:
11435 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
11436 IEM_MC_LOCAL(uint32_t, u32Value);
11437 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11438 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11439 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11440 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11441 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11442 IEM_MC_ADVANCE_RIP_AND_FINISH();
11443 IEM_MC_END();
11444 break;
11445
11446 case IEMMODE_64BIT:
11447 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
11448 IEM_MC_LOCAL(uint64_t, u64Value);
11449 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11450 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11451 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11452 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11453 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11454 IEM_MC_ADVANCE_RIP_AND_FINISH();
11455 IEM_MC_END();
11456 break;
11457
11458 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11459 }
11460 }
11461}
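
/*
 * MOVSX sign extends instead of zero extending, e.g. for the 32-bit form
 * (sketch):
 *
 *      uint32_t const u32Result = (uint32_t)(int32_t)(int8_t)uSrc;
 *
 * so bit 7 of the source fills bits 31:8 of the result.
 */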
11462
11463
11464/** Opcode 0x0f 0xbf. */
11465FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
11466{
11467 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
11468 IEMOP_HLP_MIN_386();
11469
11470 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11471
11472 /** @todo Not entirely sure how the operand size prefix is handled here,
11473 * assuming that it will be ignored. Would be nice to have a few
11474 * tests for this. */
11475 /*
11476 * If rm is denoting a register, no more instruction bytes.
11477 */
11478 if (IEM_IS_MODRM_REG_MODE(bRm))
11479 {
11480 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11481 {
11482 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
11483 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11484 IEM_MC_LOCAL(uint32_t, u32Value);
11485 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11486 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11487 IEM_MC_ADVANCE_RIP_AND_FINISH();
11488 IEM_MC_END();
11489 }
11490 else
11491 {
11492 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
11493 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11494 IEM_MC_LOCAL(uint64_t, u64Value);
11495 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11496 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11497 IEM_MC_ADVANCE_RIP_AND_FINISH();
11498 IEM_MC_END();
11499 }
11500 }
11501 else
11502 {
11503 /*
11504 * We're loading a register from memory.
11505 */
11506 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11507 {
11508 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
11509 IEM_MC_LOCAL(uint32_t, u32Value);
11510 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11511 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11512 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11513 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11514 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11515 IEM_MC_ADVANCE_RIP_AND_FINISH();
11516 IEM_MC_END();
11517 }
11518 else
11519 {
11520 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
11521 IEM_MC_LOCAL(uint64_t, u64Value);
11522 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11523 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11524 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11525 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11526 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11527 IEM_MC_ADVANCE_RIP_AND_FINISH();
11528 IEM_MC_END();
11529 }
11530 }
11531}
11532
11533
11534/** Opcode 0x0f 0xc0. */
11535FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
11536{
11537 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11538 IEMOP_HLP_MIN_486();
11539 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
11540
11541 /*
11542 * If rm is denoting a register, no more instruction bytes.
11543 */
11544 if (IEM_IS_MODRM_REG_MODE(bRm))
11545 {
11546 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_486, 0);
11547 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11548 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
11549 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
11550 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11551
11552 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11553 IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11554 IEM_MC_REF_EFLAGS(pEFlags);
11555 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
11556
11557 IEM_MC_ADVANCE_RIP_AND_FINISH();
11558 IEM_MC_END();
11559 }
11560 else
11561 {
11562 /*
11563 * We're accessing memory.
11564 */
11565 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_486, 0);
11566 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
11567 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
11568 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
11569 IEM_MC_LOCAL(uint8_t, u8RegCopy);
11570 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11571 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
11572
11573 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11574 IEMOP_HLP_DONE_DECODING();
11575 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11576 IEM_MC_FETCH_GREG_U8(u8RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
11577 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
11578 IEM_MC_FETCH_EFLAGS(EFlags);
11579 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11580 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
11581 else
11582 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
11583
11584 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
11585 IEM_MC_COMMIT_EFLAGS(EFlags);
11586 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8RegCopy);
11587 IEM_MC_ADVANCE_RIP_AND_FINISH();
11588 IEM_MC_END();
11589 }
11590}
11591
11592
11593/** Opcode 0x0f 0xc1. */
11594FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
11595{
11596 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
11597 IEMOP_HLP_MIN_486();
11598 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11599
11600 /*
11601 * If rm is denoting a register, no more instruction bytes.
11602 */
11603 if (IEM_IS_MODRM_REG_MODE(bRm))
11604 {
11605 switch (pVCpu->iem.s.enmEffOpSize)
11606 {
11607 case IEMMODE_16BIT:
11608 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_486, 0);
11609 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11610 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11611 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
11612 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11613
11614 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11615 IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11616 IEM_MC_REF_EFLAGS(pEFlags);
11617 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
11618
11619 IEM_MC_ADVANCE_RIP_AND_FINISH();
11620 IEM_MC_END();
11621 break;
11622
11623 case IEMMODE_32BIT:
11624 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_486, 0);
11625 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11626 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11627 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
11628 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11629
11630 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11631 IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11632 IEM_MC_REF_EFLAGS(pEFlags);
11633 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
11634
11635 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
11636 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
11637 IEM_MC_ADVANCE_RIP_AND_FINISH();
11638 IEM_MC_END();
11639 break;
11640
11641 case IEMMODE_64BIT:
11642 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
11643 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11644 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11645 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
11646 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11647
11648 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11649 IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11650 IEM_MC_REF_EFLAGS(pEFlags);
11651 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
11652
11653 IEM_MC_ADVANCE_RIP_AND_FINISH();
11654 IEM_MC_END();
11655 break;
11656
11657 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11658 }
11659 }
11660 else
11661 {
11662 /*
11663 * We're accessing memory.
11664 */
11665 switch (pVCpu->iem.s.enmEffOpSize)
11666 {
11667 case IEMMODE_16BIT:
11668 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_486, 0);
11669 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11670 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
11671 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
11672 IEM_MC_LOCAL(uint16_t, u16RegCopy);
11673 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11674 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
11675
11676 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11677 IEMOP_HLP_DONE_DECODING();
11678 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11679 IEM_MC_FETCH_GREG_U16(u16RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
11680 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
11681 IEM_MC_FETCH_EFLAGS(EFlags);
11682 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11683 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
11684 else
11685 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
11686
11687 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
11688 IEM_MC_COMMIT_EFLAGS(EFlags);
11689 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16RegCopy);
11690 IEM_MC_ADVANCE_RIP_AND_FINISH();
11691 IEM_MC_END();
11692 break;
11693
11694 case IEMMODE_32BIT:
11695 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_486, 0);
11696 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11697 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
11698 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
11699 IEM_MC_LOCAL(uint32_t, u32RegCopy);
11700 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11701 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
11702
11703 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11704 IEMOP_HLP_DONE_DECODING();
11705 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11706 IEM_MC_FETCH_GREG_U32(u32RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
11707 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
11708 IEM_MC_FETCH_EFLAGS(EFlags);
11709 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11710 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
11711 else
11712 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
11713
11714 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
11715 IEM_MC_COMMIT_EFLAGS(EFlags);
11716 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32RegCopy);
11717 IEM_MC_ADVANCE_RIP_AND_FINISH();
11718 IEM_MC_END();
11719 break;
11720
11721 case IEMMODE_64BIT:
11722 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0);
11723 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11724 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
11725 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
11726 IEM_MC_LOCAL(uint64_t, u64RegCopy);
11727 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11728 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
11729
11730 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11731 IEMOP_HLP_DONE_DECODING();
11732 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11733 IEM_MC_FETCH_GREG_U64(u64RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
11734 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
11735 IEM_MC_FETCH_EFLAGS(EFlags);
11736 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11737 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
11738 else
11739 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
11740
11741 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
11742 IEM_MC_COMMIT_EFLAGS(EFlags);
11743 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64RegCopy);
11744 IEM_MC_ADVANCE_RIP_AND_FINISH();
11745 IEM_MC_END();
11746 break;
11747
11748 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11749 }
11750 }
11751}
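
/*
 * XADD exchanges and adds in one operation; what the worker does, roughly
 * (a sketch, not the actual assembly implementation):
 *
 *      uTmp   = *puDst;
 *      *puDst = *puDst + *puReg;   // flags are set as for ADD
 *      *puReg = uTmp;              // register receives the old destination
 */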
11752
11753
11754/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
11755FNIEMOP_DEF(iemOp_cmpps_Vps_Wps_Ib)
11756{
11757 IEMOP_MNEMONIC3(RMI, CMPPS, cmpps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11758
11759 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11760 if (IEM_IS_MODRM_REG_MODE(bRm))
11761 {
11762 /*
11763 * XMM, XMM.
11764 */
11765 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
11766 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11767 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11768 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11769 IEM_MC_LOCAL(X86XMMREG, Dst);
11770 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11771 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11772 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11773 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11774 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11775 IEM_MC_PREPARE_SSE_USAGE();
11776 IEM_MC_REF_MXCSR(pfMxcsr);
11777 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11778 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11779 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpps_u128, pfMxcsr, pDst, pSrc, bImmArg);
11780 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11781 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11782 } IEM_MC_ELSE() {
11783 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11784 } IEM_MC_ENDIF();
11785
11786 IEM_MC_ADVANCE_RIP_AND_FINISH();
11787 IEM_MC_END();
11788 }
11789 else
11790 {
11791 /*
11792 * XMM, [mem128].
11793 */
11794 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
11795 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11796 IEM_MC_LOCAL(X86XMMREG, Dst);
11797 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11798 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11799 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11800 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11801
11802 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11803 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11804 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11805 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11806 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11807 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(Src.uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11808
11809 IEM_MC_PREPARE_SSE_USAGE();
11810 IEM_MC_REF_MXCSR(pfMxcsr);
11811 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11812 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpps_u128, pfMxcsr, pDst, pSrc, bImmArg);
11813 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11814 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11815 } IEM_MC_ELSE() {
11816 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11817 } IEM_MC_ENDIF();
11818
11819 IEM_MC_ADVANCE_RIP_AND_FINISH();
11820 IEM_MC_END();
11821 }
11822}
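
/*
 * The imm8 of the cmpps/cmppd/cmpss family selects the comparison predicate;
 * each element is turned into an all-ones or all-zeroes mask. A sketch for
 * one single precision element (using isnan() for illustration only):
 *
 *      switch (bImm & 7)
 *      {
 *          case 0: fRes = r1 == r2;                  break;  // EQ (ordered)
 *          case 1: fRes = r1 <  r2;                  break;  // LT
 *          case 2: fRes = r1 <= r2;                  break;  // LE
 *          case 3: fRes = isnan(r1) || isnan(r2);    break;  // UNORD
 *          case 4: fRes = !(r1 == r2);               break;  // NEQ
 *          case 5: fRes = !(r1 <  r2);               break;  // NLT
 *          case 6: fRes = !(r1 <= r2);               break;  // NLE
 *          case 7: fRes = !isnan(r1) && !isnan(r2);  break;  // ORD
 *      }
 *      uDstElement = fRes ? UINT32_MAX : 0;
 */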
11823
11824
11825/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
11826FNIEMOP_DEF(iemOp_cmppd_Vpd_Wpd_Ib)
11827{
11828 IEMOP_MNEMONIC3(RMI, CMPPD, cmppd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11829
11830 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11831 if (IEM_IS_MODRM_REG_MODE(bRm))
11832 {
11833 /*
11834 * XMM, XMM.
11835 */
11836 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
11837 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11838 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11839 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11840 IEM_MC_LOCAL(X86XMMREG, Dst);
11841 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11842 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11843 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11844 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11845 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11846 IEM_MC_PREPARE_SSE_USAGE();
11847 IEM_MC_REF_MXCSR(pfMxcsr);
11848 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11849 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11850 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmppd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11851 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11852 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11853 } IEM_MC_ELSE() {
11854 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11855 } IEM_MC_ENDIF();
11856
11857 IEM_MC_ADVANCE_RIP_AND_FINISH();
11858 IEM_MC_END();
11859 }
11860 else
11861 {
11862 /*
11863 * XMM, [mem128].
11864 */
11865 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
11866 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11867 IEM_MC_LOCAL(X86XMMREG, Dst);
11868 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11869 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11870 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11871 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11872
11873 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11874 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11875 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11876 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11877 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11878 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(Src.uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11879
11880 IEM_MC_PREPARE_SSE_USAGE();
11881 IEM_MC_REF_MXCSR(pfMxcsr);
11882 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11883 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmppd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11884 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11885 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11886 } IEM_MC_ELSE() {
11887 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11888 } IEM_MC_ENDIF();
11889
11890 IEM_MC_ADVANCE_RIP_AND_FINISH();
11891 IEM_MC_END();
11892 }
11893}
11894
11895
11896/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
11897FNIEMOP_DEF(iemOp_cmpss_Vss_Wss_Ib)
11898{
11899 IEMOP_MNEMONIC3(RMI, CMPSS, cmpss, Vss, Wss, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11900
11901 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11902 if (IEM_IS_MODRM_REG_MODE(bRm))
11903 {
11904 /*
11905 * XMM32, XMM32.
11906 */
11907 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
11908 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11909 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11910 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11911 IEM_MC_LOCAL(X86XMMREG, Dst);
11912 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11913 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11914 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11915 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11916 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11917 IEM_MC_PREPARE_SSE_USAGE();
11918 IEM_MC_REF_MXCSR(pfMxcsr);
11919 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11920 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11921 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpss_u128, pfMxcsr, pDst, pSrc, bImmArg);
11922 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11923 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11924 } IEM_MC_ELSE() {
11925 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11926 } IEM_MC_ENDIF();
11927
11928 IEM_MC_ADVANCE_RIP_AND_FINISH();
11929 IEM_MC_END();
11930 }
11931 else
11932 {
11933 /*
11934 * XMM32, [mem32].
11935 */
11936 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
11937 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11938 IEM_MC_LOCAL(X86XMMREG, Dst);
11939 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11940 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11941 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11942 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11943
11944 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11945 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11946 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11947 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11948 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11949 IEM_MC_FETCH_MEM_XMM_U32(Src.uSrc2, 0 /*a_iDword */, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11950
11951 IEM_MC_PREPARE_SSE_USAGE();
11952 IEM_MC_REF_MXCSR(pfMxcsr);
11953 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11954 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpss_u128, pfMxcsr, pDst, pSrc, bImmArg);
11955 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11956 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11957 } IEM_MC_ELSE() {
11958 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11959 } IEM_MC_ENDIF();
11960
11961 IEM_MC_ADVANCE_RIP_AND_FINISH();
11962 IEM_MC_END();
11963 }
11964}
11965
11966
11967/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
11968FNIEMOP_DEF(iemOp_cmpsd_Vsd_Wsd_Ib)
11969{
11970 IEMOP_MNEMONIC3(RMI, CMPSD, cmpsd, Vsd, Wsd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11971
11972 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11973 if (IEM_IS_MODRM_REG_MODE(bRm))
11974 {
11975 /*
11976 * XMM64, XMM64.
11977 */
11978 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
11979 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11980 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11981 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11982 IEM_MC_LOCAL(X86XMMREG, Dst);
11983 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11984 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11985 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11986 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11987 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11988 IEM_MC_PREPARE_SSE_USAGE();
11989 IEM_MC_REF_MXCSR(pfMxcsr);
11990 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11991 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11992 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11993 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11994 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11995 } IEM_MC_ELSE() {
11996 IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
11997 } IEM_MC_ENDIF();
11998
11999 IEM_MC_ADVANCE_RIP_AND_FINISH();
12000 IEM_MC_END();
12001 }
12002 else
12003 {
12004 /*
12005 * XMM64, [mem64].
12006 */
12007 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
12008 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
12009 IEM_MC_LOCAL(X86XMMREG, Dst);
12010 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
12011 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
12012 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
12013 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12014
12015 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12016 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12017 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
12018 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12019 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12020 IEM_MC_FETCH_MEM_XMM_U64(Src.uSrc2, 0 /*a_iQword */, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12021
12022 IEM_MC_PREPARE_SSE_USAGE();
12023 IEM_MC_REF_MXCSR(pfMxcsr);
12024 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
12025 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
12026 IEM_MC_IF_MXCSR_XCPT_PENDING() {
12027 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
12028 } IEM_MC_ELSE() {
12029 IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
12030 } IEM_MC_ENDIF();
12031
12032 IEM_MC_ADVANCE_RIP_AND_FINISH();
12033 IEM_MC_END();
12034 }
12035}
12036
12037
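/*
 * movnti is a non-temporal (cache pollution minimizing) integer store
 * hint.  IEM has no cache model, so the body below performs it as a
 * plain 32/64-bit store of the general register.
 */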
12038/** Opcode 0x0f 0xc3. */
12039FNIEMOP_DEF(iemOp_movnti_My_Gy)
12040{
12041 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
12042
12043 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12044
12045 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
12046 if (IEM_IS_MODRM_MEM_MODE(bRm))
12047 {
12048 switch (pVCpu->iem.s.enmEffOpSize)
12049 {
12050 case IEMMODE_32BIT:
12051 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
12052 IEM_MC_LOCAL(uint32_t, u32Value);
12053 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12054
12055 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12056 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12057
12058 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
12059 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
12060 IEM_MC_ADVANCE_RIP_AND_FINISH();
12061 IEM_MC_END();
12062 break;
12063
12064 case IEMMODE_64BIT:
12065 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
12066 IEM_MC_LOCAL(uint64_t, u64Value);
12067 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12068
12069 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12070 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12071
12072 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
12073 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
12074 IEM_MC_ADVANCE_RIP_AND_FINISH();
12075 IEM_MC_END();
12076 break;
12077
12078 case IEMMODE_16BIT:
12079 /** @todo check this form. */
12080 IEMOP_RAISE_INVALID_OPCODE_RET();
12081
12082 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12083 }
12084 }
12085 else
12086 IEMOP_RAISE_INVALID_OPCODE_RET();
12087}
12088
12089
12090/* Opcode 0x66 0x0f 0xc3 - invalid */
12091/* Opcode 0xf3 0x0f 0xc3 - invalid */
12092/* Opcode 0xf2 0x0f 0xc3 - invalid */
12093
12094
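/*
 * pinsrw replaces a single 16-bit word of the destination with the low
 * word of the GPR or memory operand; the imm8 selects the word and is
 * masked by the worker (0-3 for the MMX form, 0-7 for the XMM form),
 * roughly: uDst.au16[bImm & 3] = (uint16_t)uSrc.  The pextrw forms at
 * 0x0f 0xc5 are the inverse, zero-extending the selected word into a
 * general register.
 */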
12095/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
12096FNIEMOP_DEF(iemOp_pinsrw_Pq_RyMw_Ib)
12097{
12098 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Pq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
12099 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12100 if (IEM_IS_MODRM_REG_MODE(bRm))
12101 {
12102 /*
12103 * Register, register.
12104 */
12105 IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12106 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12107 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
12108 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12109 IEM_MC_ARG(uint16_t, u16Src, 1);
12110 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12111 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12112 IEM_MC_PREPARE_FPU_USAGE();
12113 IEM_MC_FPU_TO_MMX_MODE();
12114 IEM_MC_REF_MREG_U64(pu64Dst, IEM_GET_MODRM_REG_8(bRm));
12115 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
12116 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u64, pu64Dst, u16Src, bImmArg);
12117 IEM_MC_MODIFIED_MREG_BY_REF(pu64Dst);
12118 IEM_MC_ADVANCE_RIP_AND_FINISH();
12119 IEM_MC_END();
12120 }
12121 else
12122 {
12123 /*
12124 * Register, memory.
12125 */
12126 IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
12127 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12128 IEM_MC_ARG(uint16_t, u16Src, 1);
12129 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12130
12131 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12132 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12133 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12134 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
12135 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12136 IEM_MC_PREPARE_FPU_USAGE();
12137 IEM_MC_FPU_TO_MMX_MODE();
12138
12139 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12140 IEM_MC_REF_MREG_U64(pu64Dst, IEM_GET_MODRM_REG_8(bRm));
12141 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u64, pu64Dst, u16Src, bImmArg);
12142 IEM_MC_MODIFIED_MREG_BY_REF(pu64Dst);
12143 IEM_MC_ADVANCE_RIP_AND_FINISH();
12144 IEM_MC_END();
12145 }
12146}
12147
12148
12149/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
12150FNIEMOP_DEF(iemOp_pinsrw_Vdq_RyMw_Ib)
12151{
12152 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Vq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12153 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12154 if (IEM_IS_MODRM_REG_MODE(bRm))
12155 {
12156 /*
12157 * Register, register.
12158 */
12159 IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12160 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12161 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12162 IEM_MC_ARG(PRTUINT128U, puDst, 0);
12163 IEM_MC_ARG(uint16_t, u16Src, 1);
12164 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12165 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12166 IEM_MC_PREPARE_SSE_USAGE();
12167 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
12168 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12169 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u128, puDst, u16Src, bImmArg);
12170 IEM_MC_ADVANCE_RIP_AND_FINISH();
12171 IEM_MC_END();
12172 }
12173 else
12174 {
12175 /*
12176 * Register, memory.
12177 */
12178 IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
12179 IEM_MC_ARG(PRTUINT128U, puDst, 0);
12180 IEM_MC_ARG(uint16_t, u16Src, 1);
12181 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12182
12183 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12184 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12185 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12186 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12187 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12188 IEM_MC_PREPARE_SSE_USAGE();
12189
12190 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12191 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12192 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u128, puDst, u16Src, bImmArg);
12193 IEM_MC_ADVANCE_RIP_AND_FINISH();
12194 IEM_MC_END();
12195 }
12196}
12197
12198
12199/* Opcode 0xf3 0x0f 0xc4 - invalid */
12200/* Opcode 0xf2 0x0f 0xc4 - invalid */
12201
12202
12203/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
12204FNIEMOP_DEF(iemOp_pextrw_Gd_Nq_Ib)
12205{
12206 /*IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);*/ /** @todo */
12207 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12208 if (IEM_IS_MODRM_REG_MODE(bRm))
12209 {
12210 /*
12211 * Greg32, MMX, imm8.
12212 */
12213 IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
12214 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12215 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
12216 IEM_MC_LOCAL(uint16_t, u16Dst);
12217 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
12218 IEM_MC_ARG(uint64_t, u64Src, 1);
12219 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12220 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12221 IEM_MC_PREPARE_FPU_USAGE();
12222 IEM_MC_FPU_TO_MMX_MODE();
12223 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
12224 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pextrw_u64, pu16Dst, u64Src, bImmArg);
12225 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
12226 IEM_MC_ADVANCE_RIP_AND_FINISH();
12227 IEM_MC_END();
12228 }
12229 /* No memory operand. */
12230 else
12231 IEMOP_RAISE_INVALID_OPCODE_RET();
12232}
12233
12234
12235/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
12236FNIEMOP_DEF(iemOp_pextrw_Gd_Udq_Ib)
12237{
12238 IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12239 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12240 if (IEM_IS_MODRM_REG_MODE(bRm))
12241 {
12242 /*
12243 * Greg32, XMM, imm8.
12244 */
12245 IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
12246 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12247 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12248 IEM_MC_LOCAL(uint16_t, u16Dst);
12249 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
12250 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
12251 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12252 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12253 IEM_MC_PREPARE_SSE_USAGE();
12254 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12255 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pextrw_u128, pu16Dst, puSrc, bImmArg);
12256 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
12257 IEM_MC_ADVANCE_RIP_AND_FINISH();
12258 IEM_MC_END();
12259 }
12260 /* No memory operand. */
12261 else
12262 IEMOP_RAISE_INVALID_OPCODE_RET();
12263}
12264
12265
12266/* Opcode 0xf3 0x0f 0xc5 - invalid */
12267/* Opcode 0xf2 0x0f 0xc5 - invalid */
12268
12269
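/*
 * The shufps imm8 holds four 2-bit lane selectors; the two low result
 * dwords are picked from the destination, the two high ones from the
 * source.  Illustrative sketch, not the actual iemAImpl_shufps_u128 code:
 *      uRes.au32[0] = uDst.au32[ bImm       & 3];
 *      uRes.au32[1] = uDst.au32[(bImm >> 2) & 3];
 *      uRes.au32[2] = uSrc.au32[(bImm >> 4) & 3];
 *      uRes.au32[3] = uSrc.au32[(bImm >> 6) & 3];
 * shufpd below works the same way with one selector bit per qword.
 */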
12270/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
12271FNIEMOP_DEF(iemOp_shufps_Vps_Wps_Ib)
12272{
12273 IEMOP_MNEMONIC3(RMI, SHUFPS, shufps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12274 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12275 if (IEM_IS_MODRM_REG_MODE(bRm))
12276 {
12277 /*
12278 * XMM, XMM, imm8.
12279 */
12280 IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12281 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12282 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
12283 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12284 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
12285 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12286 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12287 IEM_MC_PREPARE_SSE_USAGE();
12288 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12289 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12290 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);
12291 IEM_MC_ADVANCE_RIP_AND_FINISH();
12292 IEM_MC_END();
12293 }
12294 else
12295 {
12296 /*
12297 * XMM, [mem128], imm8.
12298 */
12299 IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
12300 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12301 IEM_MC_LOCAL(RTUINT128U, uSrc);
12302 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
12303 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12304
12305 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12306 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12307 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
12309 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12310 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12311
12312 IEM_MC_PREPARE_SSE_USAGE();
12313 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12314 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);
12315
12316 IEM_MC_ADVANCE_RIP_AND_FINISH();
12317 IEM_MC_END();
12318 }
12319}
12320
12321
12322/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
12323FNIEMOP_DEF(iemOp_shufpd_Vpd_Wpd_Ib)
12324{
12325 IEMOP_MNEMONIC3(RMI, SHUFPD, shufpd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12326 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12327 if (IEM_IS_MODRM_REG_MODE(bRm))
12328 {
12329 /*
12330 * XMM, XMM, imm8.
12331 */
12332 IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12333 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12334 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12335 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12336 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
12337 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12338 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12339 IEM_MC_PREPARE_SSE_USAGE();
12340 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12341 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12342 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);
12343 IEM_MC_ADVANCE_RIP_AND_FINISH();
12344 IEM_MC_END();
12345 }
12346 else
12347 {
12348 /*
12349 * XMM, [mem128], imm8.
12350 */
12351 IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
12352 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12353 IEM_MC_LOCAL(RTUINT128U, uSrc);
12354 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
12355 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12356
12357 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12358 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12359 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12360 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12361 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12362 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12363
12364 IEM_MC_PREPARE_SSE_USAGE();
12365 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12366 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);
12367
12368 IEM_MC_ADVANCE_RIP_AND_FINISH();
12369 IEM_MC_END();
12370 }
12371}
12372
12373
12374/* Opcode 0xf3 0x0f 0xc6 - invalid */
12375/* Opcode 0xf2 0x0f 0xc6 - invalid */
12376
12377
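/*
 * Reference semantics per the SDM, in illustrative C rather than the
 * actual iemAImpl_cmpxchg8b code:
 *      uint64_t const uOld = *pu64Mem;
 *      if (uOld == ((uint64_t)uEdx << 32 | uEax))
 *      {
 *          *pu64Mem = (uint64_t)uEcx << 32 | uEbx;
 *          fZf = 1;
 *      }
 *      else
 *      {
 *          uEdx = (uint32_t)(uOld >> 32);
 *          uEax = (uint32_t)uOld;
 *          fZf = 0;
 *      }
 * With a LOCK prefix the whole thing is a single atomic read-modify-write,
 * which is why the body below picks the _locked worker in that case.
 */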
12378/** Opcode 0x0f 0xc7 !11/1. */
12379FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
12380{
12381 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
12382
12383 IEM_MC_BEGIN(4, 5, IEM_MC_F_NOT_286_OR_OLDER, 0);
12384 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
12385 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
12386 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
12387 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
12388 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
12389 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
12390 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12391 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
12392
12393 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12394 IEMOP_HLP_DONE_DECODING_EX(fCmpXchg8b);
12395 IEM_MC_MEM_MAP_U64_RW(pu64MemDst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
12396
12397 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
12398 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
12399 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
12400
12401 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
12402 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
12403 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
12404
12405 IEM_MC_FETCH_EFLAGS(EFlags);
12406 if ( !(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)
12407 && (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
12408 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
12409 else
12410 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
12411
12412 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64MemDst, bUnmapInfo);
12413 IEM_MC_COMMIT_EFLAGS(EFlags);
12414 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
12415 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
12416 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
12417 } IEM_MC_ENDIF();
12418 IEM_MC_ADVANCE_RIP_AND_FINISH();
12419
12420 IEM_MC_END();
12421}
12422
12423
12424/** Opcode REX.W 0x0f 0xc7 !11/1. */
12425FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
12426{
12427 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
12428 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fCmpXchg16b)
12429 {
12430 /*
12431 * This is hairy, very hairy macro fun. We're walking a fine line
12432 * here to make the code parsable by IEMAllInstPython.py and fit into
12433 * the patterns IEMAllThrdPython.py requires for the code morphing.
12434 */
12435#define BODY_CMPXCHG16B_HEAD \
12436 IEM_MC_BEGIN(4, 3, IEM_MC_F_64BIT, 0); \
12437 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0); \
12438 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1); \
12439 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2); \
12440 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3); \
12441 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx); \
12442 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx); \
12443 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12444 \
12445 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
12446 IEMOP_HLP_DONE_DECODING(); \
12447 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16); \
12448 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
12449 \
12450 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX); \
12451 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX); \
12452 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx); \
12453 \
12454 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX); \
12455 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX); \
12456 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx); \
12457 \
12458 IEM_MC_FETCH_EFLAGS(EFlags); \
12459 IEM_MC_HINT_FLUSH_GUEST_SHADOW_GREG(X86_GREG_xAX); \
12460 IEM_MC_HINT_FLUSH_GUEST_SHADOW_GREG(X86_GREG_xDX)
12461
12462#define BODY_CMPXCHG16B_TAIL \
12463 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW); \
12464 IEM_MC_COMMIT_EFLAGS(EFlags); \
12465 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
12466 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo); \
12467 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi); \
12468 } IEM_MC_ENDIF(); \
12469 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12470 IEM_MC_END()
12471
12472#ifdef RT_ARCH_AMD64
12473 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fCmpXchg16b)
12474 {
12475 if ( !(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)
12476 && (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
12477 {
12478 BODY_CMPXCHG16B_HEAD;
12479 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12480 BODY_CMPXCHG16B_TAIL;
12481 }
12482 else
12483 {
12484 BODY_CMPXCHG16B_HEAD;
12485 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12486 BODY_CMPXCHG16B_TAIL;
12487 }
12488 }
12489 else
12490 { /* (see comments in #else case below) */
12491 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
12492 {
12493 BODY_CMPXCHG16B_HEAD;
12494 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12495 BODY_CMPXCHG16B_TAIL;
12496 }
12497 else
12498 {
12499 BODY_CMPXCHG16B_HEAD;
12500 IEM_MC_CALL_CIMPL_4(IEM_CIMPL_F_STATUS_FLAGS,
12501 iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12502 IEM_MC_END();
12503 }
12504 }
12505
12506#elif defined(RT_ARCH_ARM64)
12507 /** @todo may require fallback for unaligned accesses... */
12508 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
12509 {
12510 BODY_CMPXCHG16B_HEAD;
12511 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12512 BODY_CMPXCHG16B_TAIL;
12513 }
12514 else
12515 {
12516 BODY_CMPXCHG16B_HEAD;
12517 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12518 BODY_CMPXCHG16B_TAIL;
12519 }
12520
12521#else
12522 /* Note! The fallback for 32-bit hosts and hosts without CX16 uses multiple
12523 accesses that are not all atomic, which works fine in a uni-CPU guest
12524 configuration (ignoring DMA). If guest SMP is active we have no choice
12525 but to use a rendezvous callback here. Sigh. */
12526 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
12527 {
12528 BODY_CMPXCHG16B_HEAD;
12529 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12530 BODY_CMPXCHG16B_TAIL;
12531 }
12532 else
12533 {
12534 BODY_CMPXCHG16B_HEAD;
12535 IEM_MC_CALL_CIMPL_4(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_cmpxchg16b_fallback_rendezvous,
12536 pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12537 IEM_MC_END();
12538 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
12539 }
12540#endif
12541
12542#undef BODY_CMPXCHG16B_HEAD
#undef BODY_CMPXCHG16B_TAIL
12543 }
12544 Log(("cmpxchg16b -> #UD\n"));
12545 IEMOP_RAISE_INVALID_OPCODE_RET();
12546}
12547
12548FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
12549{
12550 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
12551 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
12552 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
12553}
12554
12555
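/*
 * Both rdrand and rdseed signal success via CF: CF=1 means the register
 * holds a fresh random value, CF=0 means none was available and the
 * destination is zeroed; OF/SF/ZF/AF/PF are cleared either way.  Hence
 * IEM_CIMPL_F_RFLAGS and the destination GREG shadow flush below.
 */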
12556/** Opcode 0x0f 0xc7 11/6. */
12557FNIEMOP_DEF_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm)
12558{
12559 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdRand)
12560 IEMOP_RAISE_INVALID_OPCODE_RET();
12561
12562 if (IEM_IS_MODRM_REG_MODE(bRm))
12563 {
12564 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12565 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12566 IEM_MC_ARG_CONST(uint8_t, iReg, /*=*/ IEM_GET_MODRM_RM(pVCpu, bRm), 0);
12567 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/ pVCpu->iem.s.enmEffOpSize, 1);
12568 IEM_MC_HINT_FLUSH_GUEST_SHADOW_GREG(IEM_GET_MODRM_RM(pVCpu, bRm));
12569 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, iemCImpl_rdrand, iReg, enmEffOpSize);
12570 IEM_MC_END();
12571 }
12572 /* Register only. */
12573 else
12574 IEMOP_RAISE_INVALID_OPCODE_RET();
12575}
12576
12577/** Opcode 0x0f 0xc7 !11/6. */
12578#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12579FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
12580{
12581 IEMOP_MNEMONIC(vmptrld, "vmptrld");
12582 IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVDiag_Vmptrld);
12583 IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVDiag_Vmptrld);
12584 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12585 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
12586 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12587 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
12588 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12589 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
12590 IEM_MC_END();
12591}
12592#else
12593FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
12594#endif
12595
12596/** Opcode 0x66 0x0f 0xc7 !11/6. */
12597#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12598FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
12599{
12600 IEMOP_MNEMONIC(vmclear, "vmclear");
12601 IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVDiag_Vmclear);
12602 IEMOP_HLP_VMX_INSTR("vmclear", kVmxVDiag_Vmclear);
12603 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12604 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
12605 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12606 IEMOP_HLP_DONE_DECODING();
12607 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12608 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
12609 IEM_MC_END();
12610}
12611#else
12612FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
12613#endif
12614
12615/** Opcode 0xf3 0x0f 0xc7 !11/6. */
12616#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12617FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
12618{
12619 IEMOP_MNEMONIC(vmxon, "vmxon");
12620 IEMOP_HLP_VMX_INSTR("vmxon", kVmxVDiag_Vmxon);
12621 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12622 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
12623 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12624 IEMOP_HLP_DONE_DECODING();
12625 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12626 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
12627 IEM_MC_END();
12628}
12629#else
12630FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
12631#endif
12632
12633/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
12634#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12635FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
12636{
12637 IEMOP_MNEMONIC(vmptrst, "vmptrst");
12638 IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVDiag_Vmptrst);
12639 IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVDiag_Vmptrst);
12640 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12641 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
12642 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12643 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
12644 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12645 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
12646 IEM_MC_END();
12647}
12648#else
12649FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
12650#endif
12651
12652/** Opcode 0x0f 0xc7 11/7. */
12653FNIEMOP_DEF_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm)
12654{
12655 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdSeed)
12656 IEMOP_RAISE_INVALID_OPCODE_RET();
12657
12658 if (IEM_IS_MODRM_REG_MODE(bRm))
12659 {
12660 /* register destination. */
12661 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12662 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12663 IEM_MC_ARG_CONST(uint8_t, iReg, /*=*/ IEM_GET_MODRM_RM(pVCpu, bRm), 0);
12664 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/ pVCpu->iem.s.enmEffOpSize, 1);
12665 IEM_MC_HINT_FLUSH_GUEST_SHADOW_GREG(IEM_GET_MODRM_RM(pVCpu, bRm));
12666 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, iemCImpl_rdseed, iReg, enmEffOpSize);
12667 IEM_MC_END();
12668 }
12669 /* Register only. */
12670 else
12671 IEMOP_RAISE_INVALID_OPCODE_RET();
12672}
12673
12674/**
12675 * Group 9 jump table for register variant.
12676 */
12677IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
12678{ /* pfx: none, 066h, 0f3h, 0f2h */
12679 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12680 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
12681 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12682 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12683 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12684 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12685 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12686 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12687};
12688AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
12689
12690
12691/**
12692 * Group 9 jump table for memory variant.
12693 */
12694IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
12695{ /* pfx: none, 066h, 0f3h, 0f2h */
12696 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12697 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
12698 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12699 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12700 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12701 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12702 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
12703 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12704};
12705AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
12706
12707
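/*
 * Worked dispatch example: 66 0f c7 f0 has mod=3/reg=6/rm=0 and prefix
 * index 1 (066h column), so the lookup below takes
 * g_apfnGroup9RegReg[6*4 + 1], i.e. iemOp_Grp9_rdrand_Rv (rdrand ax).
 */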
12708/** Opcode 0x0f 0xc7. */
12709FNIEMOP_DEF(iemOp_Grp9)
12710{
12711 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12712 if (IEM_IS_MODRM_REG_MODE(bRm))
12713 /* register, register */
12714 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12715 + pVCpu->iem.s.idxPrefix], bRm);
12716 /* memory, register */
12717 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12718 + pVCpu->iem.s.idxPrefix], bRm);
12719}
12720
12721
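/*
 * bswap reverses the byte order of the register.  The 16-bit operand
 * size form is undefined per the SDM; real CPUs typically leave zero in
 * the low word, which is the behaviour the u16 worker is expected to
 * reproduce on the 32-bit reference below.
 */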
12722/**
12723 * Common 'bswap register' helper.
12724 */
12725FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
12726{
12727 switch (pVCpu->iem.s.enmEffOpSize)
12728 {
12729 case IEMMODE_16BIT:
12730 IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_486, 0);
12731 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12732 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12733 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
12734 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
12735 IEM_MC_ADVANCE_RIP_AND_FINISH();
12736 IEM_MC_END();
12737 break;
12738
12739 case IEMMODE_32BIT:
12740 IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_486, 0);
12741 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12742 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12743 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
12744 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
12745 IEM_MC_CLEAR_HIGH_GREG_U64(iReg);
12746 IEM_MC_ADVANCE_RIP_AND_FINISH();
12747 IEM_MC_END();
12748 break;
12749
12750 case IEMMODE_64BIT:
12751 IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT, 0);
12752 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12753 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12754 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
12755 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
12756 IEM_MC_ADVANCE_RIP_AND_FINISH();
12757 IEM_MC_END();
12758 break;
12759
12760 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12761 }
12762}
12763
12764
12765/** Opcode 0x0f 0xc8. */
12766FNIEMOP_DEF(iemOp_bswap_rAX_r8)
12767{
12768 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
12769 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
12770 prefix, but it appears REX.B is actually the correct prefix. For a parallel
12771 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
12772 IEMOP_HLP_MIN_486();
12773 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
12774}
12775
12776
12777/** Opcode 0x0f 0xc9. */
12778FNIEMOP_DEF(iemOp_bswap_rCX_r9)
12779{
12780 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
12781 IEMOP_HLP_MIN_486();
12782 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
12783}
12784
12785
12786/** Opcode 0x0f 0xca. */
12787FNIEMOP_DEF(iemOp_bswap_rDX_r10)
12788{
12789 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
12790 IEMOP_HLP_MIN_486();
12791 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
12792}
12793
12794
12795/** Opcode 0x0f 0xcb. */
12796FNIEMOP_DEF(iemOp_bswap_rBX_r11)
12797{
12798 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
12799 IEMOP_HLP_MIN_486();
12800 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
12801}
12802
12803
12804/** Opcode 0x0f 0xcc. */
12805FNIEMOP_DEF(iemOp_bswap_rSP_r12)
12806{
12807 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
12808 IEMOP_HLP_MIN_486();
12809 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
12810}
12811
12812
12813/** Opcode 0x0f 0xcd. */
12814FNIEMOP_DEF(iemOp_bswap_rBP_r13)
12815{
12816 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
12817 IEMOP_HLP_MIN_486();
12818 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
12819}
12820
12821
12822/** Opcode 0x0f 0xce. */
12823FNIEMOP_DEF(iemOp_bswap_rSI_r14)
12824{
12825 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
12826 IEMOP_HLP_MIN_486();
12827 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
12828}
12829
12830
12831/** Opcode 0x0f 0xcf. */
12832FNIEMOP_DEF(iemOp_bswap_rDI_r15)
12833{
12834 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
12835 IEMOP_HLP_MIN_486();
12836 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
12837}
12838
12839
12840/* Opcode 0x0f 0xd0 - invalid */
12841
12842
12843/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
12844FNIEMOP_DEF(iemOp_addsubpd_Vpd_Wpd)
12845{
12846 IEMOP_MNEMONIC2(RM, ADDSUBPD, addsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12847 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubpd_u128);
12848}
12849
12850
12851/* Opcode 0xf3 0x0f 0xd0 - invalid */
12852
12853
12854/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
12855FNIEMOP_DEF(iemOp_addsubps_Vps_Wps)
12856{
12857 IEMOP_MNEMONIC2(RM, ADDSUBPS, addsubps, Vps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12858 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubps_u128);
12859}
12860
12861
12862
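/*
 * For the shift-by-register forms below (psrlw/psrld/psrlq and
 * psraw/psrad) the entire low 64 bits of the source operand act as the
 * shift count.  The logical shifts produce all zeroes once the count
 * reaches the element width, while the arithmetic psraw/psrad clamp the
 * count and fill with copies of the sign bit.
 */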
12863/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
12864FNIEMOP_DEF(iemOp_psrlw_Pq_Qq)
12865{
12866 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
12867 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlw_u64);
12868}
12869
12870/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, Wx */
12871FNIEMOP_DEF(iemOp_psrlw_Vx_Wx)
12872{
12873 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12874 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlw_u128);
12875}
12876
12877/* Opcode 0xf3 0x0f 0xd1 - invalid */
12878/* Opcode 0xf2 0x0f 0xd1 - invalid */
12879
12880/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
12881FNIEMOP_DEF(iemOp_psrld_Pq_Qq)
12882{
12883 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
12884 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrld_u64);
12885}
12886
12887
12888/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
12889FNIEMOP_DEF(iemOp_psrld_Vx_Wx)
12890{
12891 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12892 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrld_u128);
12893}
12894
12895
12896/* Opcode 0xf3 0x0f 0xd2 - invalid */
12897/* Opcode 0xf2 0x0f 0xd2 - invalid */
12898
12899/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
12900FNIEMOP_DEF(iemOp_psrlq_Pq_Qq)
12901{
12902 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12903 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlq_u64);
12904}
12905
12906
12907/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
12908FNIEMOP_DEF(iemOp_psrlq_Vx_Wx)
12909{
12910 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12911 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlq_u128);
12912}
12913
12914
12915/* Opcode 0xf3 0x0f 0xd3 - invalid */
12916/* Opcode 0xf2 0x0f 0xd3 - invalid */
12917
12918
12919/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
12920FNIEMOP_DEF(iemOp_paddq_Pq_Qq)
12921{
12922 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12923 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full_Sse2, iemAImpl_paddq_u64);
12924}
12925
12926
12927/** Opcode 0x66 0x0f 0xd4 - paddq Vx, Wx */
12928FNIEMOP_DEF(iemOp_paddq_Vx_Wx)
12929{
12930 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12931 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddq_u128);
12932}
12933
12934
12935/* Opcode 0xf3 0x0f 0xd4 - invalid */
12936/* Opcode 0xf2 0x0f 0xd4 - invalid */
12937
12938/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
12939FNIEMOP_DEF(iemOp_pmullw_Pq_Qq)
12940{
12941 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12942 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmullw_u64);
12943}
12944
12945/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
12946FNIEMOP_DEF(iemOp_pmullw_Vx_Wx)
12947{
12948 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12949 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmullw_u128);
12950}
12951
12952
12953/* Opcode 0xf3 0x0f 0xd5 - invalid */
12954/* Opcode 0xf2 0x0f 0xd5 - invalid */
12955
12956/* Opcode 0x0f 0xd6 - invalid */
12957
12958/**
12959 * @opcode 0xd6
12960 * @oppfx 0x66
12961 * @opcpuid sse2
12962 * @opgroup og_sse2_pcksclr_datamove
12963 * @opxcpttype none
12964 * @optest op1=-1 op2=2 -> op1=2
12965 * @optest op1=0 op2=-42 -> op1=-42
12966 */
12967FNIEMOP_DEF(iemOp_movq_Wq_Vq)
12968{
12969 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12970 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12971 if (IEM_IS_MODRM_REG_MODE(bRm))
12972 {
12973 /*
12974 * Register, register.
12975 */
12976 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
12977 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12978 IEM_MC_LOCAL(uint64_t, uSrc);
12979
12980 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12981 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
12982
12983 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
12984 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
12985
12986 IEM_MC_ADVANCE_RIP_AND_FINISH();
12987 IEM_MC_END();
12988 }
12989 else
12990 {
12991 /*
12992 * Memory, register.
12993 */
12994 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
12995 IEM_MC_LOCAL(uint64_t, uSrc);
12996 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12997
12998 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12999 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13000 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13001 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
13002
13003 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
13004 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13005
13006 IEM_MC_ADVANCE_RIP_AND_FINISH();
13007 IEM_MC_END();
13008 }
13009}
13010
13011
13012/**
13013 * @opcode 0xd6
13014 * @opcodesub 11 mr/reg
13015 * @oppfx f3
13016 * @opcpuid sse2
13017 * @opgroup og_sse2_simdint_datamove
13018 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
13019 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
13020 */
13021FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
13022{
13023 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13024 if (IEM_IS_MODRM_REG_MODE(bRm))
13025 {
13026 /*
13027 * Register, register.
13028 */
13029 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13030 IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
13031 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13032 IEM_MC_LOCAL(uint64_t, uSrc);
13033
13034 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13035 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13036 IEM_MC_FPU_TO_MMX_MODE();
13037
13038 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_RM_8(bRm));
13039 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
13040
13041 IEM_MC_ADVANCE_RIP_AND_FINISH();
13042 IEM_MC_END();
13043 }
13044
13045 /**
13046 * @opdone
13047 * @opmnemonic udf30fd6mem
13048 * @opcode 0xd6
13049 * @opcodesub !11 mr/reg
13050 * @oppfx f3
13051 * @opunused intel-modrm
13052 * @opcpuid sse
13053 * @optest ->
13054 */
13055 else
13056 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
13057}
13058
13059
13060/**
13061 * @opcode 0xd6
13062 * @opcodesub 11 mr/reg
13063 * @oppfx f2
13064 * @opcpuid sse2
13065 * @opgroup og_sse2_simdint_datamove
13066 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
13067 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
13068 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
13069 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
13070 * @optest op1=-42 op2=0xfedcba9876543210
13071 * -> op1=0xfedcba9876543210 ftw=0xff
13072 */
13073FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
13074{
13075 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13076 if (IEM_IS_MODRM_REG_MODE(bRm))
13077 {
13078 /*
13079 * Register, register.
13080 */
13081 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13082 IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
13083 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13084 IEM_MC_LOCAL(uint64_t, uSrc);
13085
13086 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13087 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13088 IEM_MC_FPU_TO_MMX_MODE();
13089
13090 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
13091 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), uSrc);
13092
13093 IEM_MC_ADVANCE_RIP_AND_FINISH();
13094 IEM_MC_END();
13095 }
13096
13097 /**
13098 * @opdone
13099 * @opmnemonic udf20fd6mem
13100 * @opcode 0xd6
13101 * @opcodesub !11 mr/reg
13102 * @oppfx f2
13103 * @opunused intel-modrm
13104 * @opcpuid sse
13105 * @optest ->
13106 */
13107 else
13108 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
13109}
13110
13111
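/*
 * pmovmskb gathers the most significant bit of each source byte into the
 * low bits of the destination GPR (8 mask bits for MMX, 16 for XMM) and
 * zeroes the rest.  Scalar sketch, illustrative only:
 *      uint32_t fMask = 0;
 *      for (unsigned iByte = 0; iByte < cbSrc; iByte++)
 *          fMask |= ((pabSrc[iByte] >> 7) & 1U) << iByte;
 */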
13112/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
13113FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
13114{
13115 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13116 /* Docs say register only. */
13117 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
13118 {
13119 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
13120 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Nq, DISOPTYPE_X86_MMX | DISOPTYPE_HARMLESS, 0);
13121 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
13122 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
13123 IEM_MC_ARG(uint64_t *, puDst, 0);
13124 IEM_MC_ARG(uint64_t const *, puSrc, 1);
13125 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
13126 IEM_MC_PREPARE_FPU_USAGE();
13127 IEM_MC_FPU_TO_MMX_MODE();
13128
13129 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
13130 IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
13131 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u64, puDst, puSrc);
13132
13133 IEM_MC_ADVANCE_RIP_AND_FINISH();
13134 IEM_MC_END();
13135 }
13136 else
13137 IEMOP_RAISE_INVALID_OPCODE_RET();
13138}
13139
13140
13141/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
13142FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
13143{
13144 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13145 /* Docs say register only. */
13146 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
13147 {
13148 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
13149 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Ux, DISOPTYPE_X86_SSE | DISOPTYPE_HARMLESS, 0);
13150 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
13151 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13152 IEM_MC_ARG(uint64_t *, puDst, 0);
13153 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
13154 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13155 IEM_MC_PREPARE_SSE_USAGE();
13156 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
13157 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
13158 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
13159 IEM_MC_ADVANCE_RIP_AND_FINISH();
13160 IEM_MC_END();
13161 }
13162 else
13163 IEMOP_RAISE_INVALID_OPCODE_RET();
13164}
13165
13166
13167/* Opcode 0xf3 0x0f 0xd7 - invalid */
13168/* Opcode 0xf2 0x0f 0xd7 - invalid */
13169
13170
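/*
 * The psubusX/paddusX forms below use unsigned saturation: results are
 * clipped to [0, element-max] instead of wrapping.  Single-byte paddusb
 * sketch (illustrative):
 *      uint16_t const uTmp = (uint16_t)uByte1 + uByte2;
 *      uint8_t  const uRes = uTmp > 0xff ? 0xff : (uint8_t)uTmp;
 */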
13171/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
13172FNIEMOP_DEF(iemOp_psubusb_Pq_Qq)
13173{
13174 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13175 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusb_u64);
13176}
13177
13178
13179/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx */
13180FNIEMOP_DEF(iemOp_psubusb_Vx_Wx)
13181{
13182 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13183 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusb_u128);
13184}
13185
13186
13187/* Opcode 0xf3 0x0f 0xd8 - invalid */
13188/* Opcode 0xf2 0x0f 0xd8 - invalid */
13189
13190/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
13191FNIEMOP_DEF(iemOp_psubusw_Pq_Qq)
13192{
13193 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13194 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusw_u64);
13195}
13196
13197
13198/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
13199FNIEMOP_DEF(iemOp_psubusw_Vx_Wx)
13200{
13201 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13202 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusw_u128);
13203}
13204
13205
13206/* Opcode 0xf3 0x0f 0xd9 - invalid */
13207/* Opcode 0xf2 0x0f 0xd9 - invalid */
13208
13209/** Opcode 0x0f 0xda - pminub Pq, Qq */
13210FNIEMOP_DEF(iemOp_pminub_Pq_Qq)
13211{
13212 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13213 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminub_u64);
13214}
13215
13216
13217/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
13218FNIEMOP_DEF(iemOp_pminub_Vx_Wx)
13219{
13220 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13221 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminub_u128);
13222}
13223
13224/* Opcode 0xf3 0x0f 0xda - invalid */
13225/* Opcode 0xf2 0x0f 0xda - invalid */
13226
13227/** Opcode 0x0f 0xdb - pand Pq, Qq */
13228FNIEMOP_DEF(iemOp_pand_Pq_Qq)
13229{
13230 IEMOP_MNEMONIC2(RM, PAND, pand, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13231 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pand_u64);
13232}
13233
13234
13235/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
13236FNIEMOP_DEF(iemOp_pand_Vx_Wx)
13237{
13238 IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13239 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
13240}
13241
13242
13243/* Opcode 0xf3 0x0f 0xdb - invalid */
13244/* Opcode 0xf2 0x0f 0xdb - invalid */
13245
13246/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
13247FNIEMOP_DEF(iemOp_paddusb_Pq_Qq)
13248{
13249 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13250 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusb_u64);
13251}
13252
13253
13254/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
13255FNIEMOP_DEF(iemOp_paddusb_Vx_Wx)
13256{
13257 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13258 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusb_u128);
13259}
13260
13261
13262/* Opcode 0xf3 0x0f 0xdc - invalid */
13263/* Opcode 0xf2 0x0f 0xdc - invalid */
13264
13265/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
13266FNIEMOP_DEF(iemOp_paddusw_Pq_Qq)
13267{
13268 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13269 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusw_u64);
13270}
13271
13272
13273/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
13274FNIEMOP_DEF(iemOp_paddusw_Vx_Wx)
13275{
13276 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13277 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusw_u128);
13278}
13279
13280
13281/* Opcode 0xf3 0x0f 0xdd - invalid */
13282/* Opcode 0xf2 0x0f 0xdd - invalid */
13283
13284/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
13285FNIEMOP_DEF(iemOp_pmaxub_Pq_Qq)
13286{
13287 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13288 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxub_u64);
13289}
13290
13291
13292/** Opcode 0x66 0x0f 0xde - pmaxub Vx, Wx */
13293FNIEMOP_DEF(iemOp_pmaxub_Vx_Wx)
13294{
13295 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13296 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxub_u128);
13297}
13298
13299/* Opcode 0xf3 0x0f 0xde - invalid */
13300/* Opcode 0xf2 0x0f 0xde - invalid */
13301
13302
13303/** Opcode 0x0f 0xdf - pandn Pq, Qq */
13304FNIEMOP_DEF(iemOp_pandn_Pq_Qq)
13305{
13306 IEMOP_MNEMONIC2(RM, PANDN, pandn, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13307 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pandn_u64);
13308}
13309
13310
13311/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
13312FNIEMOP_DEF(iemOp_pandn_Vx_Wx)
13313{
13314 IEMOP_MNEMONIC2(RM, PANDN, pandn, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13315 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
13316}
13317
13318
13319/* Opcode 0xf3 0x0f 0xdf - invalid */
13320/* Opcode 0xf2 0x0f 0xdf - invalid */
13321
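/*
 * pavgb/pavgw compute a rounding unsigned average using a widened
 * intermediate, i.e. per byte: (uint16_t)(uByte1 + uByte2 + 1) >> 1,
 * so halves always round up.
 */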
13322/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
13323FNIEMOP_DEF(iemOp_pavgb_Pq_Qq)
13324{
13325 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13326 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgb_u64);
13327}
13328
13329
13330/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
13331FNIEMOP_DEF(iemOp_pavgb_Vx_Wx)
13332{
13333 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13334 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgb_u128);
13335}
13336
13337
13338/* Opcode 0xf3 0x0f 0xe0 - invalid */
13339/* Opcode 0xf2 0x0f 0xe0 - invalid */
13340
13341/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
13342FNIEMOP_DEF(iemOp_psraw_Pq_Qq)
13343{
13344 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13345 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psraw_u64);
13346}
13347
13348
13349/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
13350FNIEMOP_DEF(iemOp_psraw_Vx_Wx)
13351{
13352 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13353 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psraw_u128);
13354}
13355
13356
13357/* Opcode 0xf3 0x0f 0xe1 - invalid */
13358/* Opcode 0xf2 0x0f 0xe1 - invalid */
13359
13360/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
13361FNIEMOP_DEF(iemOp_psrad_Pq_Qq)
13362{
13363 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13364 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrad_u64);
13365}
13366
13367
13368/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
13369FNIEMOP_DEF(iemOp_psrad_Vx_Wx)
13370{
13371 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13372 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrad_u128);
13373}
13374
13375
13376/* Opcode 0xf3 0x0f 0xe2 - invalid */
13377/* Opcode 0xf2 0x0f 0xe2 - invalid */
13378
13379/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
13380FNIEMOP_DEF(iemOp_pavgw_Pq_Qq)
13381{
13382 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13383 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgw_u64);
13384}
13385
13386
13387/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
13388FNIEMOP_DEF(iemOp_pavgw_Vx_Wx)
13389{
13390 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13391 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgw_u128);
13392}
13393
13394
13395/* Opcode 0xf3 0x0f 0xe3 - invalid */
13396/* Opcode 0xf2 0x0f 0xe3 - invalid */
13397
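/*
 * pmulhuw/pmulhw keep only the high 16 bits of the full 32-bit product,
 * unsigned resp. signed, e.g. for the unsigned form per word:
 *      uint16_t const uRes = (uint16_t)(((uint32_t)uW1 * uW2) >> 16);
 */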
13398/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
13399FNIEMOP_DEF(iemOp_pmulhuw_Pq_Qq)
13400{
13401 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13402 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmulhuw_u64);
13403}
13404
13405
13406/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
13407FNIEMOP_DEF(iemOp_pmulhuw_Vx_Wx)
13408{
13409 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13410 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhuw_u128);
13411}
13412
13413
13414/* Opcode 0xf3 0x0f 0xe4 - invalid */
13415/* Opcode 0xf2 0x0f 0xe4 - invalid */
13416
13417/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
13418FNIEMOP_DEF(iemOp_pmulhw_Pq_Qq)
13419{
13420 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13421 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmulhw_u64);
13422}
13423
13424
13425/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
13426FNIEMOP_DEF(iemOp_pmulhw_Vx_Wx)
13427{
13428 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13429 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmulhw_u128);
13430}
13431
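/*
 * Reference sketch (illustrative only, hypothetical helper name): PMULHW
 * keeps the high 16 bits of the signed 32-bit product of each word lane;
 * PMULHUW above does the same with unsigned operands.
 */
#if 0 /* not compiled; semantics sketch */
static uint64_t iemSketchPmulhwU64(uint64_t uSrc1, uint64_t uSrc2)
{
    uint64_t uResult = 0;
    for (unsigned iWord = 0; iWord < 4; iWord++)
    {
        int32_t const iProduct = (int32_t)(int16_t)(uSrc1 >> (iWord * 16))
                               * (int32_t)(int16_t)(uSrc2 >> (iWord * 16));
        uResult |= (uint64_t)(uint16_t)(iProduct >> 16) << (iWord * 16);
    }
    return uResult;
}
#endif
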

/* Opcode 0xf3 0x0f 0xe5 - invalid */
/* Opcode 0xf2 0x0f 0xe5 - invalid */
/* Opcode 0x0f 0xe6 - invalid */


/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
FNIEMOP_DEF(iemOp_cvttpd2dq_Vx_Wpd)
{
    IEMOP_MNEMONIC2(RM, CVTTPD2DQ, cvttpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttpd2dq_u128);
}


/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
FNIEMOP_DEF(iemOp_cvtdq2pd_Vx_Wpd)
{
    IEMOP_MNEMONIC2(RM, CVTDQ2PD, cvtdq2pd, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2pd_u128);
}


/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
FNIEMOP_DEF(iemOp_cvtpd2dq_Vx_Wpd)
{
    IEMOP_MNEMONIC2(RM, CVTPD2DQ, cvtpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2dq_u128);
}

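/*
 * Note on the three 0xe6 conversion forms above: CVTTPD2DQ (66h prefix)
 * always truncates toward zero, CVTPD2DQ (F2h prefix) rounds according to
 * MXCSR.RC, and CVTDQ2PD (F3h prefix) widens two signed dwords to doubles
 * and is therefore always exact.
 */
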

/**
 * @opcode      0xe7
 * @opcodesub   !11 mr/reg
 * @oppfx       none
 * @opcpuid     sse
 * @opgroup     og_sse1_cachect
 * @opxcpttype  none
 * @optest      op1=-1 op2=2 -> op1=2 ftw=0xff
 * @optest      op1=0 op2=-42 -> op1=-42 ftw=0xff
 */
FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
{
    IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /* Register, memory. */
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /**
     * @opdone
     * @opmnemonic  ud0fe7reg
     * @opcode      0xe7
     * @opcodesub   11 mr/reg
     * @oppfx       none
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}

/**
 * @opcode      0xe7
 * @opcodesub   !11 mr/reg
 * @oppfx       0x66
 * @opcpuid     sse2
 * @opgroup     og_sse2_cachect
 * @opxcpttype  1
 * @optest      op1=-1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
{
    IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /* Register, memory. */
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic  ud660fe7reg
     * @opcode      0xe7
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}

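/*
 * Note: IEM_MC_STORE_MEM_U128_ALIGN_SSE above enforces the natural 16 byte
 * alignment MOVNTDQ requires (a misaligned operand raises \#GP), whereas the
 * MMX MOVNTQ path uses a plain 8 byte store with no alignment restriction.
 * The non-temporal caching hint itself is not modelled by the emulation;
 * both stores go through the normal guest memory path.
 */
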
/* Opcode 0xf3 0x0f 0xe7 - invalid */
/* Opcode 0xf2 0x0f 0xe7 - invalid */


/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
FNIEMOP_DEF(iemOp_psubsb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsb_u64);
}


/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
FNIEMOP_DEF(iemOp_psubsb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsb_u128);
}


/* Opcode 0xf3 0x0f 0xe8 - invalid */
/* Opcode 0xf2 0x0f 0xe8 - invalid */

/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
FNIEMOP_DEF(iemOp_psubsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsw_u64);
}


/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
FNIEMOP_DEF(iemOp_psubsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsw_u128);
}


/* Opcode 0xf3 0x0f 0xe9 - invalid */
/* Opcode 0xf2 0x0f 0xe9 - invalid */


/** Opcode 0x0f 0xea - pminsw Pq, Qq */
FNIEMOP_DEF(iemOp_pminsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminsw_u64);
}


/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
FNIEMOP_DEF(iemOp_pminsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminsw_u128);
}


/* Opcode 0xf3 0x0f 0xea - invalid */
/* Opcode 0xf2 0x0f 0xea - invalid */


/** Opcode 0x0f 0xeb - por Pq, Qq */
FNIEMOP_DEF(iemOp_por_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, POR, por, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_por_u64);
}


/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
FNIEMOP_DEF(iemOp_por_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, POR, por, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
}


/* Opcode 0xf3 0x0f 0xeb - invalid */
/* Opcode 0xf2 0x0f 0xeb - invalid */

/** Opcode 0x0f 0xec - paddsb Pq, Qq */
FNIEMOP_DEF(iemOp_paddsb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsb_u64);
}


/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
FNIEMOP_DEF(iemOp_paddsb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsb_u128);
}


/* Opcode 0xf3 0x0f 0xec - invalid */
/* Opcode 0xf2 0x0f 0xec - invalid */

/** Opcode 0x0f 0xed - paddsw Pq, Qq */
FNIEMOP_DEF(iemOp_paddsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsw_u64);
}


/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
FNIEMOP_DEF(iemOp_paddsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsw_u128);
}


/* Opcode 0xf3 0x0f 0xed - invalid */
/* Opcode 0xf2 0x0f 0xed - invalid */


/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
FNIEMOP_DEF(iemOp_pmaxsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxsw_u64);
}


/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
FNIEMOP_DEF(iemOp_pmaxsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxsw_u128);
}


/* Opcode 0xf3 0x0f 0xee - invalid */
/* Opcode 0xf2 0x0f 0xee - invalid */


/** Opcode 0x0f 0xef - pxor Pq, Qq */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pxor_u64);
}


/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PXOR, pxor, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
}


/* Opcode 0xf3 0x0f 0xef - invalid */
/* Opcode 0xf2 0x0f 0xef - invalid */

/* Opcode 0x0f 0xf0 - invalid */
/* Opcode 0x66 0x0f 0xf0 - invalid */


/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
FNIEMOP_DEF(iemOp_lddqu_Vx_Mx)
{
    IEMOP_MNEMONIC2(RM_MEM, LDDQU, lddqu, Vdq_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register - (not implemented, assuming it raises \#UD).
         */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

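/*
 * Note: unlike the aligned MOVDQA load, LDDQU (and the emulation above via
 * IEM_MC_FETCH_MEM_U128) permits any byte alignment.  On some real CPUs the
 * instruction may internally read a wider aligned block, but that is not
 * architecturally observable and is not modelled here.
 */
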

/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
FNIEMOP_DEF(iemOp_psllw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSLLW, psllw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllw_u64);
}


/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
FNIEMOP_DEF(iemOp_psllw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSLLW, psllw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllw_u128);
}


/* Opcode 0xf2 0x0f 0xf1 - invalid */

/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
FNIEMOP_DEF(iemOp_pslld_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSLLD, pslld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pslld_u64);
}


/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
FNIEMOP_DEF(iemOp_pslld_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSLLD, pslld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pslld_u128);
}


/* Opcode 0xf2 0x0f 0xf2 - invalid */

/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
FNIEMOP_DEF(iemOp_psllq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllq_u64);
}


/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
FNIEMOP_DEF(iemOp_psllq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllq_u128);
}

/* Opcode 0xf2 0x0f 0xf3 - invalid */

/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
FNIEMOP_DEF(iemOp_pmuludq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmuludq_u64);
}


/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
FNIEMOP_DEF(iemOp_pmuludq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmuludq_u128);
}

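/*
 * Reference sketch (illustrative only, hypothetical helper name): PMULUDQ
 * multiplies the low unsigned dword of each qword lane, producing a full
 * 64-bit product; the MMX form above thus yields a single 64-bit result.
 */
#if 0 /* not compiled; semantics sketch */
static uint64_t iemSketchPmuludqU64(uint64_t uSrc1, uint64_t uSrc2)
{
    return (uint64_t)(uint32_t)uSrc1 * (uint32_t)uSrc2; /* 32x32 -> 64 */
}
#endif
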

/* Opcode 0xf2 0x0f 0xf4 - invalid */

/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
FNIEMOP_DEF(iemOp_pmaddwd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmaddwd_u64);
}


/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
FNIEMOP_DEF(iemOp_pmaddwd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaddwd_u128);
}

/* Opcode 0xf2 0x0f 0xf5 - invalid */

/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
FNIEMOP_DEF(iemOp_psadbw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_psadbw_u64);
}


/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
FNIEMOP_DEF(iemOp_psadbw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psadbw_u128);
}

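/*
 * Reference sketch (illustrative only, hypothetical helper name): the MMX
 * form of PSADBW sums the absolute differences of the eight unsigned byte
 * lanes into a 16-bit value in the low word and zeroes the rest.
 */
#if 0 /* not compiled; semantics sketch */
static uint64_t iemSketchPsadbwU64(uint64_t uSrc1, uint64_t uSrc2)
{
    uint32_t uSum = 0;
    for (unsigned iByte = 0; iByte < 8; iByte++)
    {
        int32_t const iDiff = (int32_t)((uSrc1 >> (iByte * 8)) & 0xff)
                            - (int32_t)((uSrc2 >> (iByte * 8)) & 0xff);
        uSum += (uint32_t)(iDiff < 0 ? -iDiff : iDiff);
    }
    return uSum; /* max 8 * 255 = 2040, so it always fits the low word */
}
#endif
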
/* Opcode 0xf2 0x0f 0xf6 - invalid */

/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
/* Opcode 0xf2 0x0f 0xf7 - invalid */


/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
FNIEMOP_DEF(iemOp_psubb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBB, psubb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubb_u64);
}


/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
FNIEMOP_DEF(iemOp_psubb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBB, psubb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubb_u128);
}


/* Opcode 0xf2 0x0f 0xf8 - invalid */


/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
FNIEMOP_DEF(iemOp_psubw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBW, psubw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubw_u64);
}


/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
FNIEMOP_DEF(iemOp_psubw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBW, psubw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubw_u128);
}


/* Opcode 0xf2 0x0f 0xf9 - invalid */


/** Opcode 0x0f 0xfa - psubd Pq, Qq */
FNIEMOP_DEF(iemOp_psubd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBD, psubd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubd_u64);
}


/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
FNIEMOP_DEF(iemOp_psubd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBD, psubd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubd_u128);
}


/* Opcode 0xf2 0x0f 0xfa - invalid */


/** Opcode 0x0f 0xfb - psubq Pq, Qq */
FNIEMOP_DEF(iemOp_psubq_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full_Sse2, iemAImpl_psubq_u64);
}


/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
FNIEMOP_DEF(iemOp_psubq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubq_u128);
}


/* Opcode 0xf2 0x0f 0xfb - invalid */


/** Opcode 0x0f 0xfc - paddb Pq, Qq */
FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDB, paddb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddb_u64);
}


/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
FNIEMOP_DEF(iemOp_paddb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDB, paddb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddb_u128);
}


/* Opcode 0xf2 0x0f 0xfc - invalid */


/** Opcode 0x0f 0xfd - paddw Pq, Qq */
FNIEMOP_DEF(iemOp_paddw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDW, paddw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddw_u64);
}


/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
FNIEMOP_DEF(iemOp_paddw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDW, paddw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddw_u128);
}


/* Opcode 0xf2 0x0f 0xfd - invalid */


/** Opcode 0x0f 0xfe - paddd Pq, Qq */
FNIEMOP_DEF(iemOp_paddd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PADDD, paddd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddd_u64);
}


/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
FNIEMOP_DEF(iemOp_paddd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PADDD, paddd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddd_u128);
}


/* Opcode 0xf2 0x0f 0xfe - invalid */


/** Opcode **** 0x0f 0xff - UD0 */
FNIEMOP_DEF(iemOp_ud0)
{
    IEMOP_MNEMONIC(ud0, "ud0");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
            IEM_OPCODE_SKIP_RM_EFF_ADDR_BYTES(bRm);
    }
    IEMOP_HLP_DONE_DECODING();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}

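/*
 * Note on iemOp_ud0 above: Intel CPUs decode a ModR/M byte (and any
 * effective-address bytes) for UD0 before raising \#UD, while AMD CPUs
 * treat the opcode as two bytes only; hence the vendor check when deciding
 * how many bytes to consume.
 */
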


/**
 * Two byte opcode map, first byte 0x0f.
 *
 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
 *          check if it needs updating as well when making changes.
 */
const PFNIEMOP g_apfnTwoByteMap[] =
{
    /* no prefix, 066h prefix f3h prefix, f2h prefix */
    /* 0x00 */ IEMOP_X4(iemOp_Grp6),
    /* 0x01 */ IEMOP_X4(iemOp_Grp7),
    /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
    /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
    /* 0x04 */ IEMOP_X4(iemOp_Invalid),
    /* 0x05 */ IEMOP_X4(iemOp_syscall),
    /* 0x06 */ IEMOP_X4(iemOp_clts),
    /* 0x07 */ IEMOP_X4(iemOp_sysret),
    /* 0x08 */ IEMOP_X4(iemOp_invd),
    /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
    /* 0x0a */ IEMOP_X4(iemOp_Invalid),
    /* 0x0b */ IEMOP_X4(iemOp_ud2),
    /* 0x0c */ IEMOP_X4(iemOp_Invalid),
    /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
    /* 0x0e */ IEMOP_X4(iemOp_femms),
    /* 0x0f */ IEMOP_X4(iemOp_3Dnow),

    /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
    /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
    /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
    /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
    /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
    /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
    /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),

    /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
    /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
    /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
    /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
    /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
    /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
    /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
    /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
    /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
    /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
    /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
    /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
    /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
    /* 0x34 */ IEMOP_X4(iemOp_sysenter),
    /* 0x35 */ IEMOP_X4(iemOp_sysexit),
    /* 0x36 */ IEMOP_X4(iemOp_Invalid),
    /* 0x37 */ IEMOP_X4(iemOp_getsec),
    /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
    /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
    /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
    /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
    /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),

    /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
    /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
    /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
    /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
    /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
    /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
    /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
    /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
    /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
    /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
    /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
    /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
    /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
    /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
    /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
    /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),

    /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
    /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
    /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
    /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
    /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
    /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
    /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
    /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
    /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
    /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,

    /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x68 */ iemOp_punpckhbw_Pq_Qq, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x69 */ iemOp_punpckhwd_Pq_Qq, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6a */ iemOp_punpckhdq_Pq_Qq, iemOp_punpckhdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,

    /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
    /* 0x71 */ IEMOP_X4(iemOp_Grp12),
    /* 0x72 */ IEMOP_X4(iemOp_Grp13),
    /* 0x73 */ IEMOP_X4(iemOp_Grp14),
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
    /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
    /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
    /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,

    /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
    /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
    /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
    /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
    /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
    /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
    /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
    /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
    /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
    /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
    /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
    /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
    /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
    /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
    /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
    /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),

    /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
    /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
    /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
    /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
    /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
    /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
    /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
    /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
    /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
    /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
    /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
    /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
    /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
    /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
    /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
    /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),

    /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
    /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
    /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
    /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
    /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
    /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
    /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
    /* 0xaa */ IEMOP_X4(iemOp_rsm),
    /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
    /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
    /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
    /* 0xae */ IEMOP_X4(iemOp_Grp15),
    /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),

    /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
    /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
    /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
    /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
    /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
    /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
    /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
    /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
    /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
    /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
    /* 0xba */ IEMOP_X4(iemOp_Grp8),
    /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
    /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
    /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
    /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),

    /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
    /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
    /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
    /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
    /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
    /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
    /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
    /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
    /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
    /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
    /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),

    /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
    /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
    /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xff */ IEMOP_X4(iemOp_ud0),
};
AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
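
/*
 * Sketch of the expected lookup (illustrative only; the actual dispatch
 * lives in the decoder loop, and the prefix-index field name is an
 * assumption): each opcode byte owns four consecutive slots, selected by
 * the active SIMD prefix in the order none, 66h, F3h, F2h - hence the
 * 256 * 4 = 1024 entry assertion above.
 */
#if 0 /* not compiled; dispatch sketch */
static PFNIEMOP iemSketchLookupTwoByte(uint8_t bOpcode, unsigned idxPrefix /* 0=none, 1=66h, 2=F3h, 3=F2h */)
{
    return g_apfnTwoByteMap[(size_t)bOpcode * 4 + idxPrefix];
}
#endif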

/** @} */