VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstTwoByte0f.cpp.h@103025

Last change on this file since 103025 was 102978, checked in by vboxsync, 13 months ago

VMM/IEM: Implemented generic fallback for misaligned x86 locking that is not compatible with the host. Using the existing split-lock solution with VINF_EM_EMULATE_SPLIT_LOCK from bugref:10052. We keep ignoring the 'lock' prefix in the recompiler for single CPU VMs (now also on amd64 hosts). [build fixes] bugref:10547

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 524.4 KB
/* $Id: IEMAllInstTwoByte0f.cpp.h 102978 2024-01-19 23:19:05Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */


/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
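 * (MMX packed ops such as PAND or PADDB are examples of this shape; which
 * instructions actually route here is decided by the opcode table entries.)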
 */
FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 *
 * Unlike iemOpCommonMmx_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands.
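 * (This is why the body below uses IEM_MC_CALL_VOID_AIMPL_2 rather than
 * IEM_MC_CALL_MMX_AIMPL_2.)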
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
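 * (The SSE extensions added MMX-register instructions of this shape, e.g.
 * PAVGB, PMAXSW and PSADBW.)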
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 *
 * Unlike iemOpCommonMmxSse_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 * that was introduced with SSE2.
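 * (SSE2 added a few MMX-register instructions of this shape, e.g. PADDQ
 * and PMULUDQ.)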
 */
FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full_Sse2, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
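 * (e.g. the packed single-precision logicals ANDPS, ORPS and XORPS fit
 * this shape.)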
 *
 * Proper alignment of the 128-bit operand is enforced.
 * SSE cpuid checks. No SIMD FP exceptions.
 *
 * @sa iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128
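 * (e.g. the 66h-prefixed XMM forms of PAND, POR and PXOR fit this shape.)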
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * Unlike iemOpCommonSse2_FullFull_To_Full, the @a pfnU128 worker function takes
 * no FXSAVE state, just the operands.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions on the forms:
 *      pxxxx   mm1, mm2/mem32
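 * (e.g. PUNPCKLBW/PUNPCKLWD/PUNPCKLDQ, which only use the low half of the
 * source and hence only read 32 bits from memory.)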
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem32].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U32_ZX_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions on the forms:
 *      pxxxx   xmm1, xmm2/mem128
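 * (e.g. UNPCKLPS is an instruction of this shape.)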
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned 64-bit or 128-bit memory access for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxxx   xmm1, xmm2/mem128
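 * (e.g. the XMM forms of PUNPCKLBW/PUNPCKLWD/PUNPCKLDQ, and UNPCKLPD.)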
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned 64-bit or 128-bit memory access for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions on the form:
 *      pxxxx   mm1, mm2/mem64
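 * (e.g. PUNPCKHBW/PUNPCKHWD/PUNPCKHDQ, which use the high half of the
 * source but still do a full 64-bit read in the memory case.)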
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Intel docs this to be a full 64-bit read. */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions on the form:
 *      pxxxx   xmm1, xmm2/mem128
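 * (e.g. UNPCKHPS is an instruction of this shape.)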
 *
 * The 2nd operand is the second half of a register, which for SSE means a
 * 128-bit aligned access that may read the full 128 bits or only the upper
 * 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions on the forms:
 *      pxxs    xmm1, xmm2/mem128
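 * (e.g. the packed single-precision arithmetic ops ADDPS, SUBPS, MULPS
 * and DIVPS fit this shape.)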
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions on the forms:
 *      pxxs    xmm1, xmm2/mem32
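 * (e.g. the scalar single-precision ops ADDSS, SUBSS, MULSS and DIVSS fit
 * this shape.)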
 *
 * The 32-bit memory operand is not subject to the 128-bit alignment check.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullR32_To_Full, PFNIEMAIMPLFPSSEF2U128R32, pfnU128_R32)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM32.
         */
        IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT32U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R32_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem32].
         */
        IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(RTFLOAT32U, r32Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src2, r32Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R32(r32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pr32Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxd    xmm1, xmm2/mem128
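 * (e.g. the packed double-precision ops ADDPD, SUBPD, MULPD and DIVPD fit
 * this shape.)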
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSseFp_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions on the forms:
 *      pxxs    xmm1, xmm2/mem64
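 * (e.g. the scalar double-precision ops ADDSD, SUBSD, MULSD and DIVSD fit
 * this shape.)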
 *
 * The 64-bit memory operand is not subject to the 128-bit alignment check.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullR64_To_Full, PFNIEMAIMPLFPSSEF2U128R64, pfnU128_R64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT64U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R64_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem64].
         */
        IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(RTFLOAT64U, r64Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src2, r64Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R64(r64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pr64Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions on the form:
 *      pxxxx   xmm1, xmm2/mem128
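 * (e.g. the XMM forms of PUNPCKHBW/PUNPCKHWD/PUNPCKHDQ/PUNPCKHQDQ, and
 * UNPCKHPD.)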
 *
 * The 2nd operand is the second half of a register, which for SSE means a
 * 128-bit aligned access that may read the full 128 bits or only the upper
 * 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE3 instructions on the forms:
 *      hxxx    xmm1, xmm2/mem128
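 * (e.g. the SSE3 horizontal ops HADDPS, HSUBPS, HADDPD and HSUBPD fit
 * this shape.)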
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE3 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse3Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                                    iemCImpl_sldt_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
                                    iemCImpl_str_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_286, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1, IEM_MC_F_MIN_286, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_286, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1, IEM_MC_F_MIN_286, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
}


/** Common worker for grp6 verr (/4) and verw (/5). */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_286, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1, IEM_MC_F_MIN_286, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[IEM_GET_MODRM_REG_8(bRm)], bRm);
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1, IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
             want all hypercalls regardless of instruction used, and if a
             hypercall isn't handled by GIM or HMSvm, it will raise an #UD.
             (NEM/win makes ASSUMPTIONS about this behavior.) */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0, iemCImpl_vmcall);
}


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
    IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_vmlaunch);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_MNEMONIC(vmresume, "vmresume");
    IEMOP_HLP_IN_VMX_OPERATION("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_vmresume);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_MNEMONIC(vmxoff, "vmxoff");
    IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmxoff);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1, IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, 0, iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
/** @todo testcase: test prefixes and exceptions. currently not checking for the
 *        OPSIZE one ... */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_DEFER_TO_CIMPL_0_RET(0,
                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
                                    iemCImpl_xgetbv);
    }
    IEMOP_RAISE_INVALID_OPCODE_RET();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
/** @todo testcase: test prefixes and exceptions. currently not checking for the
 *        OPSIZE one ... */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_xsetbv);
    }
    IEMOP_RAISE_INVALID_OPCODE_RET();
}


/** Opcode 0x0f 0x01 /3. */
FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lidt, "lidt");
    IEMMODE enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : pVCpu->iem.s.enmEffOpSize;
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg, /*=*/ enmEffOpSize, 2);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 0xd8. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
{
    IEMOP_MNEMONIC(vmrun, "vmrun");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_vmrun);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
#endif

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
{
    IEMOP_MNEMONIC(vmmcall, "vmmcall");
    /** @todo r=bird: Table A-8 on page 524 in vol 3 has VMGEXIT for this
     *        opcode sequence when F3 or F2 is used as prefix. So, the assumption
     *        here cannot be right... */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
             want all hypercalls regardless of instruction used, and if a
             hypercall isn't handled by GIM or HMSvm, it will raise an #UD.
             (NEM/win makes ASSUMPTIONS about this behavior.) */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0, iemCImpl_vmmcall);
}

/** Opcode 0x0f 0x01 0xda. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
{
    IEMOP_MNEMONIC(vmload, "vmload");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmload);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
#endif


/** Opcode 0x0f 0x01 0xdb. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
{
    IEMOP_MNEMONIC(vmsave, "vmsave");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmsave);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
#endif


/** Opcode 0x0f 0x01 0xdc. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
{
    IEMOP_MNEMONIC(stgi, "stgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_stgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
#endif


/** Opcode 0x0f 0x01 0xdd. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
{
    IEMOP_MNEMONIC(clgi, "clgi");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_clgi);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
#endif


/** Opcode 0x0f 0x01 0xdf. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
{
    IEMOP_MNEMONIC(invlpga, "invlpga");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_invlpga);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
#endif


/** Opcode 0x0f 0x01 0xde. */
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
{
    IEMOP_MNEMONIC(skinit, "skinit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_skinit);
}
#else
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
#endif


1679/** Opcode 0x0f 0x01 /4. */
1680FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
1681{
1682 IEMOP_MNEMONIC(smsw, "smsw");
1683 IEMOP_HLP_MIN_286();
1684 if (IEM_IS_MODRM_REG_MODE(bRm))
1685 {
1686 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1687 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
1688 iemCImpl_smsw_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
1689 }
1690
1691 /* Ignore operand size here, memory refs are always 16-bit. */
1692 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_286, 0);
1693 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
1694 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1695 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1696 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
1697 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
1698 IEM_MC_END();
1699}
1700
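/* A minimal reference sketch of the SMSW split handled above, assuming a
   plain uint64_t CR0 value: the register form honours the effective operand
   size, while the memory form always stores 16 bits. The helper names are
   hypothetical and not part of the decoder. */
#if 0 /* illustrative only */
static uint64_t smswRefRegForm(uint64_t uCr0, unsigned cBitsOpSize)
{
    if (cBitsOpSize == 16)
        return (uint16_t)uCr0;
    if (cBitsOpSize == 32)
        return (uint32_t)uCr0;
    return uCr0;                        /* 64-bit: the whole register */
}

static uint16_t smswRefMemForm(uint64_t uCr0)
{
    return (uint16_t)uCr0;              /* memory form ignores operand size */
}
#endif
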
1701
1702/** Opcode 0x0f 0x01 /6. */
1703FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
1704{
1705 /* The operand size is effectively ignored, all is 16-bit and only the
1706 lower 4 bits (PE, MP, EM and TS) are used. */
1707 IEMOP_MNEMONIC(lmsw, "lmsw");
1708 IEMOP_HLP_MIN_286();
1709 if (IEM_IS_MODRM_REG_MODE(bRm))
1710 {
1711 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_286, 0);
1712 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1713 IEM_MC_ARG(uint16_t, u16Tmp, 0);
1714 IEM_MC_ARG_CONST(RTGCPTR, GCPtrEffDst, NIL_RTGCPTR, 1);
1715 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
1716 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
1717 IEM_MC_END();
1718 }
1719 else
1720 {
1721 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_286, 0);
1722 IEM_MC_ARG(uint16_t, u16Tmp, 0);
1723 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
1724 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1725 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1726 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
1727 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, 0, iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
1728 IEM_MC_END();
1729 }
1730}
1731
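/* A sketch of the architectural LMSW rule the /6 handler above defers to:
   only CR0.PE/MP/EM/TS (bits 0..3) can be modified, and PE can be set but
   never cleared. Assumes the X86_CR0_* constants from iprt/x86.h; the
   helper name is hypothetical. */
#if 0 /* illustrative only */
static uint64_t lmswRefApply(uint64_t uCr0, uint16_t uMsw)
{
    uint64_t const fMask   = X86_CR0_PE | X86_CR0_MP | X86_CR0_EM | X86_CR0_TS;
    uint64_t       uNewCr0 = (uCr0 & ~fMask) | (uMsw & fMask);
    uNewCr0 |= uCr0 & X86_CR0_PE;       /* LMSW cannot clear PE */
    return uNewCr0;
}
#endif
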
1732
1733/** Opcode 0x0f 0x01 /7. */
1734FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
1735{
1736 IEMOP_MNEMONIC(invlpg, "invlpg");
1737 IEMOP_HLP_MIN_486();
1738 IEM_MC_BEGIN(1, 1, IEM_MC_F_MIN_386, 0);
1739 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
1740 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1741 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1742 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_invlpg, GCPtrEffDst);
1743 IEM_MC_END();
1744}
1745
1746
1747/** Opcode 0x0f 0x01 0xf8. */
1748FNIEMOP_DEF(iemOp_Grp7_swapgs)
1749{
1750 IEMOP_MNEMONIC(swapgs, "swapgs");
1751 IEMOP_HLP_ONLY_64BIT();
1752 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1753 IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS), iemCImpl_swapgs);
1754}
1755
1756
1757/** Opcode 0x0f 0x01 0xf9. */
1758FNIEMOP_DEF(iemOp_Grp7_rdtscp)
1759{
1760 IEMOP_MNEMONIC(rdtscp, "rdtscp");
1761 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1762 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
1763 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
1764 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
1765 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
1766 iemCImpl_rdtscp);
1767}
1768
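/* The register set RDTSCP dirties, matching the mask passed to the defer
   macro above: EDX:EAX receive the TSC and ECX receives IA32_TSC_AUX.
   A hypothetical reference helper: */
#if 0 /* illustrative only */
static void rdtscpRefOutputs(uint32_t *puEax, uint32_t *puEdx, uint32_t *puEcx,
                             uint64_t uTsc, uint32_t uTscAux)
{
    *puEax = (uint32_t)uTsc;
    *puEdx = (uint32_t)(uTsc >> 32);
    *puEcx = uTscAux;
}
#endif
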
1769
1770/**
1771 * Group 7 jump table, memory variant.
1772 */
1773IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
1774{
1775 iemOp_Grp7_sgdt,
1776 iemOp_Grp7_sidt,
1777 iemOp_Grp7_lgdt,
1778 iemOp_Grp7_lidt,
1779 iemOp_Grp7_smsw,
1780 iemOp_InvalidWithRM,
1781 iemOp_Grp7_lmsw,
1782 iemOp_Grp7_invlpg
1783};
1784
1785
1786/** Opcode 0x0f 0x01. */
1787FNIEMOP_DEF(iemOp_Grp7)
1788{
1789 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1790 if (IEM_IS_MODRM_MEM_MODE(bRm))
1791 return FNIEMOP_CALL_1(g_apfnGroup7Mem[IEM_GET_MODRM_REG_8(bRm)], bRm);
1792
1793 switch (IEM_GET_MODRM_REG_8(bRm))
1794 {
1795 case 0:
1796 switch (IEM_GET_MODRM_RM_8(bRm))
1797 {
1798 case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
1799 case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
1800 case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
1801 case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
1802 }
1803 IEMOP_RAISE_INVALID_OPCODE_RET();
1804
1805 case 1:
1806 switch (IEM_GET_MODRM_RM_8(bRm))
1807 {
1808 case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
1809 case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
1810 }
1811 IEMOP_RAISE_INVALID_OPCODE_RET();
1812
1813 case 2:
1814 switch (IEM_GET_MODRM_RM_8(bRm))
1815 {
1816 case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
1817 case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
1818 }
1819 IEMOP_RAISE_INVALID_OPCODE_RET();
1820
1821 case 3:
1822 switch (IEM_GET_MODRM_RM_8(bRm))
1823 {
1824 case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
1825 case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
1826 case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
1827 case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
1828 case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
1829 case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
1830 case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
1831 case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
1832 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1833 }
1834
1835 case 4:
1836 return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);
1837
1838 case 5:
1839 IEMOP_RAISE_INVALID_OPCODE_RET();
1840
1841 case 6:
1842 return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);
1843
1844 case 7:
1845 switch (IEM_GET_MODRM_RM_8(bRm))
1846 {
1847 case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
1848 case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
1849 }
1850 IEMOP_RAISE_INVALID_OPCODE_RET();
1851
1852 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1853 }
1854}
1855
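/* For reference, the ModR/M carving the dispatcher above relies on: mod
   (bits 7:6) picks the memory vs. register form, reg (bits 5:3) selects the
   group member, and rm (bits 2:0) sub-selects the register-form encodings.
   A hypothetical helper spelling that out: */
#if 0 /* illustrative only */
static void modrmRefFields(uint8_t bRm, unsigned *puMod, unsigned *puReg, unsigned *puRm)
{
    *puMod = (bRm >> 6) & 3;
    *puReg = (bRm >> 3) & 7;
    *puRm  = bRm & 7;
}
#endif
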
1856 /** Common worker for LAR (0x0f 0x02) and LSL (0x0f 0x03). */
1857FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
1858{
1859 IEMOP_HLP_NO_REAL_OR_V86_MODE();
1860 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1861
1862 if (IEM_IS_MODRM_REG_MODE(bRm))
1863 {
1864 switch (pVCpu->iem.s.enmEffOpSize)
1865 {
1866 case IEMMODE_16BIT:
1867 IEM_MC_BEGIN(3, 0, 0, 0);
1868 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1869 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1870 IEM_MC_ARG(uint16_t, u16Sel, 1);
1871 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1872
1873 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1874 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1875 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
1876 iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1877
1878 IEM_MC_END();
1879 break;
1880
1881 case IEMMODE_32BIT:
1882 case IEMMODE_64BIT:
1883 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
1884 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1885 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1886 IEM_MC_ARG(uint16_t, u16Sel, 1);
1887 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1888
1889 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1890 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1891 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
1892 iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1893
1894 IEM_MC_END();
1895 break;
1896
1897 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1898 }
1899 }
1900 else
1901 {
1902 switch (pVCpu->iem.s.enmEffOpSize)
1903 {
1904 case IEMMODE_16BIT:
1905 IEM_MC_BEGIN(3, 1, 0, 0);
1906 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1907 IEM_MC_ARG(uint16_t, u16Sel, 1);
1908 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1909 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1910
1911 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1912 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1913
1914 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1915 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1916 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
1917 iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1918
1919 IEM_MC_END();
1920 break;
1921
1922 case IEMMODE_32BIT:
1923 case IEMMODE_64BIT:
1924 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
1925 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1926 IEM_MC_ARG(uint16_t, u16Sel, 1);
1927 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1928 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1929
1930 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1931 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1932/** @todo testcase: make sure it's a 16-bit read. */
1933
1934 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1935 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1936 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_REG(pVCpu, bRm)),
1937 iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1938
1939 IEM_MC_END();
1940 break;
1941
1942 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1943 }
1944 }
1945}
1946
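/* Conceptually, both LAR and LSL probe the descriptor for the selector and
   only touch the destination and ZF on success; a hedged sketch of that
   contract (types and names are assumptions, the real work lives in
   iemCImpl_LarLsl_u16/u64): */
#if 0 /* illustrative only */
static bool larLslRefProbe(bool fIsLar, bool fSelectorOk,
                           uint64_t uAccessRights, uint64_t uLimit, uint64_t *puDst)
{
    if (!fSelectorOk)
        return false;                   /* ZF=0, destination unchanged */
    *puDst = fIsLar ? uAccessRights : uLimit;
    return true;                        /* ZF=1 */
}
#endif
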
1947
1948
1949/** Opcode 0x0f 0x02. */
1950FNIEMOP_DEF(iemOp_lar_Gv_Ew)
1951{
1952 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
1953 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
1954}
1955
1956
1957/** Opcode 0x0f 0x03. */
1958FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
1959{
1960 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
1961 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
1962}
1963
1964
1965/** Opcode 0x0f 0x05. */
1966FNIEMOP_DEF(iemOp_syscall)
1967{
1968 IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
1969 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1970 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
1971 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
1972 iemCImpl_syscall);
1973}
1974
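/* A rough model of the 64-bit SYSCALL effect the deferred iemCImpl_syscall
   implements for long mode: RCX/R11 capture the return context, RFLAGS is
   masked per IA32_FMASK, and execution branches to the IA32_LSTAR target.
   The helper name and parameters are assumptions. */
#if 0 /* illustrative only */
static void syscallRef64(uint64_t *puRip, uint64_t *puRcx, uint64_t *puR11,
                         uint64_t *puRFlags, uint64_t uLstar, uint64_t fFlagMask)
{
    *puRcx     = *puRip;                /* return RIP */
    *puR11     = *puRFlags;             /* return RFLAGS */
    *puRFlags &= ~fFlagMask;            /* IA32_FMASK clears selected flags */
    *puRip     = uLstar;                /* far branch to the MSR target */
}
#endif
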
1975
1976/** Opcode 0x0f 0x06. */
1977FNIEMOP_DEF(iemOp_clts)
1978{
1979 IEMOP_MNEMONIC(clts, "clts");
1980 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1981 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_clts);
1982}
1983
1984
1985/** Opcode 0x0f 0x07. */
1986FNIEMOP_DEF(iemOp_sysret)
1987{
1988 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
1989 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1990 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
1991 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
1992 iemCImpl_sysret, pVCpu->iem.s.enmEffOpSize);
1993}
1994
1995
1996/** Opcode 0x0f 0x08. */
1997FNIEMOP_DEF(iemOp_invd)
1998{
1999 IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
2000 IEMOP_HLP_MIN_486();
2001 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2002 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_invd);
2003}
2004
2005
2006/** Opcode 0x0f 0x09. */
2007FNIEMOP_DEF(iemOp_wbinvd)
2008{
2009 IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
2010 IEMOP_HLP_MIN_486();
2011 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2012 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_wbinvd);
2013}
2014
2015
2016/** Opcode 0x0f 0x0b. */
2017FNIEMOP_DEF(iemOp_ud2)
2018{
2019 IEMOP_MNEMONIC(ud2, "ud2");
2020 IEMOP_RAISE_INVALID_OPCODE_RET();
2021}
2022
2023/** Opcode 0x0f 0x0d. */
2024FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
2025{
2026 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
2027 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLongMode && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
2028 {
2029 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
2030 IEMOP_RAISE_INVALID_OPCODE_RET();
2031 }
2032
2033 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2034 if (IEM_IS_MODRM_REG_MODE(bRm))
2035 {
2036 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
2037 IEMOP_RAISE_INVALID_OPCODE_RET();
2038 }
2039
2040 switch (IEM_GET_MODRM_REG_8(bRm))
2041 {
2042 case 2: /* Aliased to /0 for the time being. */
2043 case 4: /* Aliased to /0 for the time being. */
2044 case 5: /* Aliased to /0 for the time being. */
2045 case 6: /* Aliased to /0 for the time being. */
2046 case 7: /* Aliased to /0 for the time being. */
2047 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
2048 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
2049 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
2050 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2051 }
2052
2053 IEM_MC_BEGIN(0, 1, 0, 0);
2054 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2055 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2056 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2057 /* Currently a NOP. */
2058 NOREF(GCPtrEffSrc);
2059 IEM_MC_ADVANCE_RIP_AND_FINISH();
2060 IEM_MC_END();
2061}
2062
2063
2064/** Opcode 0x0f 0x0e. */
2065FNIEMOP_DEF(iemOp_femms)
2066{
2067 IEMOP_MNEMONIC(femms, "femms");
2068
2069 IEM_MC_BEGIN(0, 0, 0, 0);
2070 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2071 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
2072 IEM_MC_MAYBE_RAISE_FPU_XCPT();
2073 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2074 IEM_MC_FPU_FROM_MMX_MODE();
2075 IEM_MC_ADVANCE_RIP_AND_FINISH();
2076 IEM_MC_END();
2077}
2078
2079
2080/** Opcode 0x0f 0x0f. */
2081FNIEMOP_DEF(iemOp_3Dnow)
2082{
2083 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
2084 {
2085 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
2086 IEMOP_RAISE_INVALID_OPCODE_RET();
2087 }
2088
2089#ifdef IEM_WITH_3DNOW
2090 /* This is pretty sparse, use switch instead of table. */
2091 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2092 return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
2093#else
2094 IEMOP_BITCH_ABOUT_STUB();
2095 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2096#endif
2097}
2098
2099
2100/**
2101 * @opcode 0x10
2102 * @oppfx none
2103 * @opcpuid sse
2104 * @opgroup og_sse_simdfp_datamove
2105 * @opxcpttype 4UA
2106 * @optest op1=1 op2=2 -> op1=2
2107 * @optest op1=0 op2=-22 -> op1=-22
2108 */
2109FNIEMOP_DEF(iemOp_movups_Vps_Wps)
2110{
2111 IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2112 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2113 if (IEM_IS_MODRM_REG_MODE(bRm))
2114 {
2115 /*
2116 * XMM128, XMM128.
2117 */
2118 IEM_MC_BEGIN(0, 0, 0, 0);
2119 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2120 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2121 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2122 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2123 IEM_GET_MODRM_RM(pVCpu, bRm));
2124 IEM_MC_ADVANCE_RIP_AND_FINISH();
2125 IEM_MC_END();
2126 }
2127 else
2128 {
2129 /*
2130 * XMM128, [mem128].
2131 */
2132 IEM_MC_BEGIN(0, 2, 0, 0);
2133 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2134 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2135
2136 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2137 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2138 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2139 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2140
2141 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2142 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2143
2144 IEM_MC_ADVANCE_RIP_AND_FINISH();
2145 IEM_MC_END();
2146 }
2148}
2149
2150
2151/**
2152 * @opcode 0x10
2153 * @oppfx 0x66
2154 * @opcpuid sse2
2155 * @opgroup og_sse2_pcksclr_datamove
2156 * @opxcpttype 4UA
2157 * @optest op1=1 op2=2 -> op1=2
2158 * @optest op1=0 op2=-42 -> op1=-42
2159 */
2160FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
2161{
2162 IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2163 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2164 if (IEM_IS_MODRM_REG_MODE(bRm))
2165 {
2166 /*
2167 * XMM128, XMM128.
2168 */
2169 IEM_MC_BEGIN(0, 0, 0, 0);
2170 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2171 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2172 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2173 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2174 IEM_GET_MODRM_RM(pVCpu, bRm));
2175 IEM_MC_ADVANCE_RIP_AND_FINISH();
2176 IEM_MC_END();
2177 }
2178 else
2179 {
2180 /*
2181 * XMM128, [mem128].
2182 */
2183 IEM_MC_BEGIN(0, 2, 0, 0);
2184 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2185 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2186
2187 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2188 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2189 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2190 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2191
2192 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2193 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2194
2195 IEM_MC_ADVANCE_RIP_AND_FINISH();
2196 IEM_MC_END();
2197 }
2198}
2199
2200
2201/**
2202 * @opcode 0x10
2203 * @oppfx 0xf3
2204 * @opcpuid sse
2205 * @opgroup og_sse_simdfp_datamove
2206 * @opxcpttype 5
2207 * @optest op1=1 op2=2 -> op1=2
2208 * @optest op1=0 op2=-22 -> op1=-22
2209 */
2210FNIEMOP_DEF(iemOp_movss_Vss_Wss)
2211{
2212 IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2213 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2214 if (IEM_IS_MODRM_REG_MODE(bRm))
2215 {
2216 /*
2217 * XMM32, XMM32.
2218 */
2219 IEM_MC_BEGIN(0, 1, 0, 0);
2220 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2221 IEM_MC_LOCAL(uint32_t, uSrc);
2222
2223 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2224 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2225 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/ );
2226 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, uSrc);
2227
2228 IEM_MC_ADVANCE_RIP_AND_FINISH();
2229 IEM_MC_END();
2230 }
2231 else
2232 {
2233 /*
2234 * XMM128, [mem32].
2235 */
2236 IEM_MC_BEGIN(0, 2, 0, 0);
2237 IEM_MC_LOCAL(uint32_t, uSrc);
2238 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2239
2240 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2241 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2242 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2243 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2244
2245 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2246 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2247
2248 IEM_MC_ADVANCE_RIP_AND_FINISH();
2249 IEM_MC_END();
2250 }
2251}
2252
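/* The asymmetry worth calling out for the MOVSS load forms above: the
   register form merges only dword 0, while the memory form zero-extends the
   32-bit load through the whole 128-bit register. A hypothetical reference
   model on plain dword lanes: */
#if 0 /* illustrative only */
static void movssRefRegForm(uint32_t aDst[4], const uint32_t aSrc[4])
{
    aDst[0] = aSrc[0];                  /* dwords 1..3 are preserved */
}

static void movssRefMemForm(uint32_t aDst[4], uint32_t uMem)
{
    aDst[0] = uMem;
    aDst[1] = aDst[2] = aDst[3] = 0;    /* zero-extended to 128 bits */
}
#endif
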
2253
2254/**
2255 * @opcode 0x10
2256 * @oppfx 0xf2
2257 * @opcpuid sse2
2258 * @opgroup og_sse2_pcksclr_datamove
2259 * @opxcpttype 5
2260 * @optest op1=1 op2=2 -> op1=2
2261 * @optest op1=0 op2=-42 -> op1=-42
2262 */
2263FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
2264{
2265 IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2266 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2267 if (IEM_IS_MODRM_REG_MODE(bRm))
2268 {
2269 /*
2270 * XMM64, XMM64.
2271 */
2272 IEM_MC_BEGIN(0, 1, 0, 0);
2273 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2274 IEM_MC_LOCAL(uint64_t, uSrc);
2275
2276 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2277 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2278 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2279 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2280
2281 IEM_MC_ADVANCE_RIP_AND_FINISH();
2282 IEM_MC_END();
2283 }
2284 else
2285 {
2286 /*
2287 * XMM128, [mem64].
2288 */
2289 IEM_MC_BEGIN(0, 2, 0, 0);
2290 IEM_MC_LOCAL(uint64_t, uSrc);
2291 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2292
2293 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2294 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2295 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2296 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2297
2298 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2299 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2300
2301 IEM_MC_ADVANCE_RIP_AND_FINISH();
2302 IEM_MC_END();
2303 }
2304}
2305
2306
2307/**
2308 * @opcode 0x11
2309 * @oppfx none
2310 * @opcpuid sse
2311 * @opgroup og_sse_simdfp_datamove
2312 * @opxcpttype 4UA
2313 * @optest op1=1 op2=2 -> op1=2
2314 * @optest op1=0 op2=-42 -> op1=-42
2315 */
2316FNIEMOP_DEF(iemOp_movups_Wps_Vps)
2317{
2318 IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2319 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2320 if (IEM_IS_MODRM_REG_MODE(bRm))
2321 {
2322 /*
2323 * XMM128, XMM128.
2324 */
2325 IEM_MC_BEGIN(0, 0, 0, 0);
2326 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2327 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2328 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2329 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2330 IEM_GET_MODRM_REG(pVCpu, bRm));
2331 IEM_MC_ADVANCE_RIP_AND_FINISH();
2332 IEM_MC_END();
2333 }
2334 else
2335 {
2336 /*
2337 * [mem128], XMM128.
2338 */
2339 IEM_MC_BEGIN(0, 2, 0, 0);
2340 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2341 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2342
2343 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2344 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2345 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2346 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2347
2348 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2349 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2350
2351 IEM_MC_ADVANCE_RIP_AND_FINISH();
2352 IEM_MC_END();
2353 }
2354}
2355
2356
2357/**
2358 * @opcode 0x11
2359 * @oppfx 0x66
2360 * @opcpuid sse2
2361 * @opgroup og_sse2_pcksclr_datamove
2362 * @opxcpttype 4UA
2363 * @optest op1=1 op2=2 -> op1=2
2364 * @optest op1=0 op2=-42 -> op1=-42
2365 */
2366FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
2367{
2368 IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2369 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2370 if (IEM_IS_MODRM_REG_MODE(bRm))
2371 {
2372 /*
2373 * XMM128, XMM128.
2374 */
2375 IEM_MC_BEGIN(0, 0, 0, 0);
2376 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2377 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2378 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2379 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2380 IEM_GET_MODRM_REG(pVCpu, bRm));
2381 IEM_MC_ADVANCE_RIP_AND_FINISH();
2382 IEM_MC_END();
2383 }
2384 else
2385 {
2386 /*
2387 * [mem128], XMM128.
2388 */
2389 IEM_MC_BEGIN(0, 2, 0, 0);
2390 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2391 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2392
2393 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2394 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2395 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2396 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2397
2398 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2399 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2400
2401 IEM_MC_ADVANCE_RIP_AND_FINISH();
2402 IEM_MC_END();
2403 }
2404}
2405
2406
2407/**
2408 * @opcode 0x11
2409 * @oppfx 0xf3
2410 * @opcpuid sse
2411 * @opgroup og_sse_simdfp_datamove
2412 * @opxcpttype 5
2413 * @optest op1=1 op2=2 -> op1=2
2414 * @optest op1=0 op2=-22 -> op1=-22
2415 */
2416FNIEMOP_DEF(iemOp_movss_Wss_Vss)
2417{
2418 IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2419 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2420 if (IEM_IS_MODRM_REG_MODE(bRm))
2421 {
2422 /*
2423 * XMM32, XMM32.
2424 */
2425 IEM_MC_BEGIN(0, 1, 0, 0);
2426 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2427 IEM_MC_LOCAL(uint32_t, uSrc);
2428
2429 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2430 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2431 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
2432 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/, uSrc);
2433
2434 IEM_MC_ADVANCE_RIP_AND_FINISH();
2435 IEM_MC_END();
2436 }
2437 else
2438 {
2439 /*
2440 * [mem32], XMM32.
2441 */
2442 IEM_MC_BEGIN(0, 2, 0, 0);
2443 IEM_MC_LOCAL(uint32_t, uSrc);
2444 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2445
2446 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2447 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2448 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2449 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2450
2451 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
2452 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2453
2454 IEM_MC_ADVANCE_RIP_AND_FINISH();
2455 IEM_MC_END();
2456 }
2457}
2458
2459
2460/**
2461 * @opcode 0x11
2462 * @oppfx 0xf2
2463 * @opcpuid sse2
2464 * @opgroup og_sse2_pcksclr_datamove
2465 * @opxcpttype 5
2466 * @optest op1=1 op2=2 -> op1=2
2467 * @optest op1=0 op2=-42 -> op1=-42
2468 */
2469FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
2470{
2471 IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2472 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2473 if (IEM_IS_MODRM_REG_MODE(bRm))
2474 {
2475 /*
2476 * XMM64, XMM64.
2477 */
2478 IEM_MC_BEGIN(0, 1, 0, 0);
2479 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2480 IEM_MC_LOCAL(uint64_t, uSrc);
2481
2482 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2483 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2484 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2485 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2486
2487 IEM_MC_ADVANCE_RIP_AND_FINISH();
2488 IEM_MC_END();
2489 }
2490 else
2491 {
2492 /*
2493 * [mem64], XMM64.
2494 */
2495 IEM_MC_BEGIN(0, 2, 0, 0);
2496 IEM_MC_LOCAL(uint64_t, uSrc);
2497 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2498
2499 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2500 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2501 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2502 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2503
2504 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2505 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2506
2507 IEM_MC_ADVANCE_RIP_AND_FINISH();
2508 IEM_MC_END();
2509 }
2510}
2511
2512
2513FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
2514{
2515 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2516 if (IEM_IS_MODRM_REG_MODE(bRm))
2517 {
2518 /**
2519 * @opcode 0x12
2520 * @opcodesub 11 mr/reg
2521 * @oppfx none
2522 * @opcpuid sse
2523 * @opgroup og_sse_simdfp_datamove
2524 * @opxcpttype 5
2525 * @optest op1=1 op2=2 -> op1=2
2526 * @optest op1=0 op2=-42 -> op1=-42
2527 */
2528 IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2529
2530 IEM_MC_BEGIN(0, 1, 0, 0);
2531 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2532 IEM_MC_LOCAL(uint64_t, uSrc);
2533
2534 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2535 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2536 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 1 /* a_iQword*/);
2537 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2538
2539 IEM_MC_ADVANCE_RIP_AND_FINISH();
2540 IEM_MC_END();
2541 }
2542 else
2543 {
2544 /**
2545 * @opdone
2546 * @opcode 0x12
2547 * @opcodesub !11 mr/reg
2548 * @oppfx none
2549 * @opcpuid sse
2550 * @opgroup og_sse_simdfp_datamove
2551 * @opxcpttype 5
2552 * @optest op1=1 op2=2 -> op1=2
2553 * @optest op1=0 op2=-42 -> op1=-42
2554 * @opfunction iemOp_movlps_Vq_Mq__movhlps
2555 */
2556 IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2557
2558 IEM_MC_BEGIN(0, 2, 0, 0);
2559 IEM_MC_LOCAL(uint64_t, uSrc);
2560 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2561
2562 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2563 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2564 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2565 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2566
2567 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2568 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2569
2570 IEM_MC_ADVANCE_RIP_AND_FINISH();
2571 IEM_MC_END();
2572 }
2573}
2574
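/* Reference model for the two forms decoded above, on plain qword lanes
   (hypothetical helpers): MOVHLPS copies the high qword of the source into
   the low qword of the destination, MOVLPS loads the low qword from memory;
   both leave the destination's high qword alone. */
#if 0 /* illustrative only */
static void movhlpsRef(uint64_t aDst[2], const uint64_t aSrc[2])
{
    aDst[0] = aSrc[1];
}

static void movlpsRefLoad(uint64_t aDst[2], uint64_t uMem)
{
    aDst[0] = uMem;
}
#endif
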
2575
2576/**
2577 * @opcode 0x12
2578 * @opcodesub !11 mr/reg
2579 * @oppfx 0x66
2580 * @opcpuid sse2
2581 * @opgroup og_sse2_pcksclr_datamove
2582 * @opxcpttype 5
2583 * @optest op1=1 op2=2 -> op1=2
2584 * @optest op1=0 op2=-42 -> op1=-42
2585 */
2586FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
2587{
2588 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2589 if (IEM_IS_MODRM_MEM_MODE(bRm))
2590 {
2591 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2592
2593 IEM_MC_BEGIN(0, 2, 0, 0);
2594 IEM_MC_LOCAL(uint64_t, uSrc);
2595 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2596
2597 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2598 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2599 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2600 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2601
2602 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2603 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2604
2605 IEM_MC_ADVANCE_RIP_AND_FINISH();
2606 IEM_MC_END();
2607 }
2608
2609 /**
2610 * @opdone
2611 * @opmnemonic ud660f12m3
2612 * @opcode 0x12
2613 * @opcodesub 11 mr/reg
2614 * @oppfx 0x66
2615 * @opunused immediate
2616 * @opcpuid sse
2617 * @optest ->
2618 */
2619 else
2620 IEMOP_RAISE_INVALID_OPCODE_RET();
2621}
2622
2623
2624/**
2625 * @opcode 0x12
2626 * @oppfx 0xf3
2627 * @opcpuid sse3
2628 * @opgroup og_sse3_pcksclr_datamove
2629 * @opxcpttype 4
2630 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
2631 * op1=0x00000002000000020000000100000001
2632 */
2633FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
2634{
2635 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2636 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2637 if (IEM_IS_MODRM_REG_MODE(bRm))
2638 {
2639 /*
2640 * XMM, XMM.
2641 */
2642 IEM_MC_BEGIN(0, 1, 0, 0);
2643 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2644 IEM_MC_LOCAL(RTUINT128U, uSrc);
2645
2646 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2647 IEM_MC_PREPARE_SSE_USAGE();
2648
2649 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2650 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
2651 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
2652 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
2653 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
2654
2655 IEM_MC_ADVANCE_RIP_AND_FINISH();
2656 IEM_MC_END();
2657 }
2658 else
2659 {
2660 /*
2661 * XMM, [mem128].
2662 */
2663 IEM_MC_BEGIN(0, 2, 0, 0);
2664 IEM_MC_LOCAL(RTUINT128U, uSrc);
2665 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2666
2667 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2668 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2669 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2670 IEM_MC_PREPARE_SSE_USAGE();
2671
2672 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2673 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
2674 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
2675 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
2676 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
2677
2678 IEM_MC_ADVANCE_RIP_AND_FINISH();
2679 IEM_MC_END();
2680 }
2681}
2682
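/* The dword shuffle the four stores above implement, as a hypothetical
   reference helper: MOVSLDUP duplicates the even source lanes. */
#if 0 /* illustrative only */
static void movsldupRef(uint32_t aDst[4], const uint32_t aSrc[4])
{
    aDst[0] = aDst[1] = aSrc[0];
    aDst[2] = aDst[3] = aSrc[2];
}
#endif
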
2683
2684/**
2685 * @opcode 0x12
2686 * @oppfx 0xf2
2687 * @opcpuid sse3
2688 * @opgroup og_sse3_pcksclr_datamove
2689 * @opxcpttype 5
2690 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
2691 * op1=0x22222222111111112222222211111111
2692 */
2693FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
2694{
2695 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2696 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2697 if (IEM_IS_MODRM_REG_MODE(bRm))
2698 {
2699 /*
2700 * XMM128, XMM64.
2701 */
2702 IEM_MC_BEGIN(0, 1, 0, 0);
2703 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2704 IEM_MC_LOCAL(uint64_t, uSrc);
2705
2706 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2707 IEM_MC_PREPARE_SSE_USAGE();
2708
2709 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2710 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2711 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2712
2713 IEM_MC_ADVANCE_RIP_AND_FINISH();
2714 IEM_MC_END();
2715 }
2716 else
2717 {
2718 /*
2719 * XMM128, [mem64].
2720 */
2721 IEM_MC_BEGIN(0, 2, 0, 0);
2722 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2723 IEM_MC_LOCAL(uint64_t, uSrc);
2724
2725 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2726 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2727 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2728 IEM_MC_PREPARE_SSE_USAGE();
2729
2730 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2731 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2732 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2733
2734 IEM_MC_ADVANCE_RIP_AND_FINISH();
2735 IEM_MC_END();
2736 }
2737}
2738
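/* Likewise for MOVDDUP, which the low-qword fetch plus the two stores above
   implement: broadcast the low source qword (hypothetical helper). */
#if 0 /* illustrative only */
static void movddupRef(uint64_t aDst[2], const uint64_t aSrc[2])
{
    aDst[0] = aDst[1] = aSrc[0];
}
#endif
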
2739
2740/**
2741 * @opcode 0x13
2742 * @opcodesub !11 mr/reg
2743 * @oppfx none
2744 * @opcpuid sse
2745 * @opgroup og_sse_simdfp_datamove
2746 * @opxcpttype 5
2747 * @optest op1=1 op2=2 -> op1=2
2748 * @optest op1=0 op2=-42 -> op1=-42
2749 */
2750FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
2751{
2752 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2753 if (IEM_IS_MODRM_MEM_MODE(bRm))
2754 {
2755 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2756
2757 IEM_MC_BEGIN(0, 2, 0, 0);
2758 IEM_MC_LOCAL(uint64_t, uSrc);
2759 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2760
2761 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2762 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2763 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2764 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2765
2766 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2767 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2768
2769 IEM_MC_ADVANCE_RIP_AND_FINISH();
2770 IEM_MC_END();
2771 }
2772
2773 /**
2774 * @opdone
2775 * @opmnemonic ud0f13m3
2776 * @opcode 0x13
2777 * @opcodesub 11 mr/reg
2778 * @oppfx none
2779 * @opunused immediate
2780 * @opcpuid sse
2781 * @optest ->
2782 */
2783 else
2784 IEMOP_RAISE_INVALID_OPCODE_RET();
2785}
2786
2787
2788/**
2789 * @opcode 0x13
2790 * @opcodesub !11 mr/reg
2791 * @oppfx 0x66
2792 * @opcpuid sse2
2793 * @opgroup og_sse2_pcksclr_datamove
2794 * @opxcpttype 5
2795 * @optest op1=1 op2=2 -> op1=2
2796 * @optest op1=0 op2=-42 -> op1=-42
2797 */
2798FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
2799{
2800 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2801 if (IEM_IS_MODRM_MEM_MODE(bRm))
2802 {
2803 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2804
2805 IEM_MC_BEGIN(0, 2, 0, 0);
2806 IEM_MC_LOCAL(uint64_t, uSrc);
2807 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2808
2809 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2810 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2811 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2812 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2813
2814 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2815 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2816
2817 IEM_MC_ADVANCE_RIP_AND_FINISH();
2818 IEM_MC_END();
2819 }
2820
2821 /**
2822 * @opdone
2823 * @opmnemonic ud660f13m3
2824 * @opcode 0x13
2825 * @opcodesub 11 mr/reg
2826 * @oppfx 0x66
2827 * @opunused immediate
2828 * @opcpuid sse
2829 * @optest ->
2830 */
2831 else
2832 IEMOP_RAISE_INVALID_OPCODE_RET();
2833}
2834
2835
2836/**
2837 * @opmnemonic udf30f13
2838 * @opcode 0x13
2839 * @oppfx 0xf3
2840 * @opunused intel-modrm
2841 * @opcpuid sse
2842 * @optest ->
2843 * @opdone
2844 */
2845
2846/**
2847 * @opmnemonic udf20f13
2848 * @opcode 0x13
2849 * @oppfx 0xf2
2850 * @opunused intel-modrm
2851 * @opcpuid sse
2852 * @optest ->
2853 * @opdone
2854 */
2855
2856/** Opcode 0x0f 0x14 - unpcklps Vx, Wx*/
2857FNIEMOP_DEF(iemOp_unpcklps_Vx_Wx)
2858{
2859 IEMOP_MNEMONIC2(RM, UNPCKLPS, unpcklps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2860 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, iemAImpl_unpcklps_u128);
2861}
2862
2863
2864/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
2865FNIEMOP_DEF(iemOp_unpcklpd_Vx_Wx)
2866{
2867 IEMOP_MNEMONIC2(RM, UNPCKLPD, unpcklpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2868 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_unpcklpd_u128);
2869}
2870
2871
2872/**
2873 * @opdone
2874 * @opmnemonic udf30f14
2875 * @opcode 0x14
2876 * @oppfx 0xf3
2877 * @opunused intel-modrm
2878 * @opcpuid sse
2879 * @optest ->
2880 * @opdone
2881 */
2882
2883/**
2884 * @opmnemonic udf20f14
2885 * @opcode 0x14
2886 * @oppfx 0xf2
2887 * @opunused intel-modrm
2888 * @opcpuid sse
2889 * @optest ->
2890 * @opdone
2891 */
2892
2893/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
2894FNIEMOP_DEF(iemOp_unpckhps_Vx_Wx)
2895{
2896 IEMOP_MNEMONIC2(RM, UNPCKHPS, unpckhps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2897 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, iemAImpl_unpckhps_u128);
2898}
2899
2900
2901/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
2902FNIEMOP_DEF(iemOp_unpckhpd_Vx_Wx)
2903{
2904 IEMOP_MNEMONIC2(RM, UNPCKHPD, unpckhpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2905 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_unpckhpd_u128);
2906}
2907
2908
2909/* Opcode 0xf3 0x0f 0x15 - invalid */
2910/* Opcode 0xf2 0x0f 0x15 - invalid */
2911
2912/**
2913 * @opdone
2914 * @opmnemonic udf30f15
2915 * @opcode 0x15
2916 * @oppfx 0xf3
2917 * @opunused intel-modrm
2918 * @opcpuid sse
2919 * @optest ->
2920 * @opdone
2921 */
2922
2923/**
2924 * @opmnemonic udf20f15
2925 * @opcode 0x15
2926 * @oppfx 0xf2
2927 * @opunused intel-modrm
2928 * @opcpuid sse
2929 * @optest ->
2930 * @opdone
2931 */
2932
2933FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
2934{
2935 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2936 if (IEM_IS_MODRM_REG_MODE(bRm))
2937 {
2938 /**
2939 * @opcode 0x16
2940 * @opcodesub 11 mr/reg
2941 * @oppfx none
2942 * @opcpuid sse
2943 * @opgroup og_sse_simdfp_datamove
2944 * @opxcpttype 5
2945 * @optest op1=1 op2=2 -> op1=2
2946 * @optest op1=0 op2=-42 -> op1=-42
2947 */
2948 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2949
2950 IEM_MC_BEGIN(0, 1, 0, 0);
2951 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2952 IEM_MC_LOCAL(uint64_t, uSrc);
2953
2954 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2955 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2956 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2957 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2958
2959 IEM_MC_ADVANCE_RIP_AND_FINISH();
2960 IEM_MC_END();
2961 }
2962 else
2963 {
2964 /**
2965 * @opdone
2966 * @opcode 0x16
2967 * @opcodesub !11 mr/reg
2968 * @oppfx none
2969 * @opcpuid sse
2970 * @opgroup og_sse_simdfp_datamove
2971 * @opxcpttype 5
2972 * @optest op1=1 op2=2 -> op1=2
2973 * @optest op1=0 op2=-42 -> op1=-42
2974 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
2975 */
2976 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2977
2978 IEM_MC_BEGIN(0, 2, 0, 0);
2979 IEM_MC_LOCAL(uint64_t, uSrc);
2980 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2981
2982 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2983 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2984 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2985 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2986
2987 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2988 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2989
2990 IEM_MC_ADVANCE_RIP_AND_FINISH();
2991 IEM_MC_END();
2992 }
2993}
2994
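/* The mirror image of the MOVHLPS/MOVLPS pair earlier: MOVLHPS copies the
   low source qword into the high destination qword, and the MOVHPS memory
   form loads the high qword; qword 0 is preserved (hypothetical helpers). */
#if 0 /* illustrative only */
static void movlhpsRef(uint64_t aDst[2], const uint64_t aSrc[2])
{
    aDst[1] = aSrc[0];
}

static void movhpsRefLoad(uint64_t aDst[2], uint64_t uMem)
{
    aDst[1] = uMem;
}
#endif
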
2995
2996/**
2997 * @opcode 0x16
2998 * @opcodesub !11 mr/reg
2999 * @oppfx 0x66
3000 * @opcpuid sse2
3001 * @opgroup og_sse2_pcksclr_datamove
3002 * @opxcpttype 5
3003 * @optest op1=1 op2=2 -> op1=2
3004 * @optest op1=0 op2=-42 -> op1=-42
3005 */
3006FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
3007{
3008 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3009 if (IEM_IS_MODRM_MEM_MODE(bRm))
3010 {
3011 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3012
3013 IEM_MC_BEGIN(0, 2, 0, 0);
3014 IEM_MC_LOCAL(uint64_t, uSrc);
3015 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3016
3017 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3018 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3019 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3020 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3021
3022 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3023 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3024
3025 IEM_MC_ADVANCE_RIP_AND_FINISH();
3026 IEM_MC_END();
3027 }
3028
3029 /**
3030 * @opdone
3031 * @opmnemonic ud660f16m3
3032 * @opcode 0x16
3033 * @opcodesub 11 mr/reg
3034 * @oppfx 0x66
3035 * @opunused immediate
3036 * @opcpuid sse
3037 * @optest ->
3038 */
3039 else
3040 IEMOP_RAISE_INVALID_OPCODE_RET();
3041}
3042
3043
3044/**
3045 * @opcode 0x16
3046 * @oppfx 0xf3
3047 * @opcpuid sse3
3048 * @opgroup og_sse3_pcksclr_datamove
3049 * @opxcpttype 4
3050 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
3051 * op1=0x00000002000000020000000100000001
3052 */
3053FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
3054{
3055 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3056 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3057 if (IEM_IS_MODRM_REG_MODE(bRm))
3058 {
3059 /*
3060 * XMM128, XMM128.
3061 */
3062 IEM_MC_BEGIN(0, 1, 0, 0);
3063 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
3064 IEM_MC_LOCAL(RTUINT128U, uSrc);
3065
3066 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3067 IEM_MC_PREPARE_SSE_USAGE();
3068
3069 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3070 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
3071 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
3072 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
3073 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
3074
3075 IEM_MC_ADVANCE_RIP_AND_FINISH();
3076 IEM_MC_END();
3077 }
3078 else
3079 {
3080 /*
3081 * XMM128, [mem128].
3082 */
3083 IEM_MC_BEGIN(0, 2, 0, 0);
3084 IEM_MC_LOCAL(RTUINT128U, uSrc);
3085 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3086
3087 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3088 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
3089 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3090 IEM_MC_PREPARE_SSE_USAGE();
3091
3092 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3093 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
3094 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
3095 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
3096 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
3097
3098 IEM_MC_ADVANCE_RIP_AND_FINISH();
3099 IEM_MC_END();
3100 }
3101}
3102
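/* And the odd-lane counterpart to MOVSLDUP: MOVSHDUP duplicates source
   dwords 1 and 3, as in this hypothetical reference helper. */
#if 0 /* illustrative only */
static void movshdupRef(uint32_t aDst[4], const uint32_t aSrc[4])
{
    aDst[0] = aDst[1] = aSrc[1];
    aDst[2] = aDst[3] = aSrc[3];
}
#endif
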
3103/**
3104 * @opdone
3105 * @opmnemonic udf20f16
3106 * @opcode 0x16
3107 * @oppfx 0xf2
3108 * @opunused intel-modrm
3109 * @opcpuid sse
3110 * @optest ->
3111 * @opdone
3112 */
3113
3114
3115/**
3116 * @opcode 0x17
3117 * @opcodesub !11 mr/reg
3118 * @oppfx none
3119 * @opcpuid sse
3120 * @opgroup og_sse_simdfp_datamove
3121 * @opxcpttype 5
3122 * @optest op1=1 op2=2 -> op1=2
3123 * @optest op1=0 op2=-42 -> op1=-42
3124 */
3125FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
3126{
3127 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3128 if (IEM_IS_MODRM_MEM_MODE(bRm))
3129 {
3130 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3131
3132 IEM_MC_BEGIN(0, 2, 0, 0);
3133 IEM_MC_LOCAL(uint64_t, uSrc);
3134 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3135
3136 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3137 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3138 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3139 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3140
3141 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
3142 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3143
3144 IEM_MC_ADVANCE_RIP_AND_FINISH();
3145 IEM_MC_END();
3146 }
3147
3148 /**
3149 * @opdone
3150 * @opmnemonic ud0f17m3
3151 * @opcode 0x17
3152 * @opcodesub 11 mr/reg
3153 * @oppfx none
3154 * @opunused immediate
3155 * @opcpuid sse
3156 * @optest ->
3157 */
3158 else
3159 IEMOP_RAISE_INVALID_OPCODE_RET();
3160}
3161
3162
3163/**
3164 * @opcode 0x17
3165 * @opcodesub !11 mr/reg
3166 * @oppfx 0x66
3167 * @opcpuid sse2
3168 * @opgroup og_sse2_pcksclr_datamove
3169 * @opxcpttype 5
3170 * @optest op1=1 op2=2 -> op1=2
3171 * @optest op1=0 op2=-42 -> op1=-42
3172 */
3173FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
3174{
3175 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3176 if (IEM_IS_MODRM_MEM_MODE(bRm))
3177 {
3178 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3179
3180 IEM_MC_BEGIN(0, 2, 0, 0);
3181 IEM_MC_LOCAL(uint64_t, uSrc);
3182 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3183
3184 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3185 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3186 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3187 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3188
3189 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
3190 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3191
3192 IEM_MC_ADVANCE_RIP_AND_FINISH();
3193 IEM_MC_END();
3194 }
3195
3196 /**
3197 * @opdone
3198 * @opmnemonic ud660f17m3
3199 * @opcode 0x17
3200 * @opcodesub 11 mr/reg
3201 * @oppfx 0x66
3202 * @opunused immediate
3203 * @opcpuid sse
3204 * @optest ->
3205 */
3206 else
3207 IEMOP_RAISE_INVALID_OPCODE_RET();
3208}
3209
3210
3211/**
3212 * @opdone
3213 * @opmnemonic udf30f17
3214 * @opcode 0x17
3215 * @oppfx 0xf3
3216 * @opunused intel-modrm
3217 * @opcpuid sse
3218 * @optest ->
3219 * @opdone
3220 */
3221
3222/**
3223 * @opmnemonic udf20f17
3224 * @opcode 0x17
3225 * @oppfx 0xf2
3226 * @opunused intel-modrm
3227 * @opcpuid sse
3228 * @optest ->
3229 * @opdone
3230 */
3231
3232
3233/** Opcode 0x0f 0x18. */
3234FNIEMOP_DEF(iemOp_prefetch_Grp16)
3235{
3236 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3237 if (IEM_IS_MODRM_MEM_MODE(bRm))
3238 {
3239 switch (IEM_GET_MODRM_REG_8(bRm))
3240 {
3241 case 4: /* Aliased to /0 for the time being according to AMD. */
3242 case 5: /* Aliased to /0 for the time being according to AMD. */
3243 case 6: /* Aliased to /0 for the time being according to AMD. */
3244 case 7: /* Aliased to /0 for the time being according to AMD. */
3245 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
3246 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
3247 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
3248 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
3249 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3250 }
3251
3252 IEM_MC_BEGIN(0, 1, 0, 0);
3253 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3254 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3255 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3256 /* Currently a NOP. */
3257 NOREF(GCPtrEffSrc);
3258 IEM_MC_ADVANCE_RIP_AND_FINISH();
3259 IEM_MC_END();
3260 }
3261 else
3262 IEMOP_RAISE_INVALID_OPCODE_RET();
3263}
3264
3265
3266/** Opcode 0x0f 0x19..0x1f. */
3267FNIEMOP_DEF(iemOp_nop_Ev)
3268{
3269 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
3270 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3271 if (IEM_IS_MODRM_REG_MODE(bRm))
3272 {
3273 IEM_MC_BEGIN(0, 0, 0, 0);
3274 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3275 IEM_MC_ADVANCE_RIP_AND_FINISH();
3276 IEM_MC_END();
3277 }
3278 else
3279 {
3280 IEM_MC_BEGIN(0, 1, 0, 0);
3281 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3282 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3283 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3284 /* Currently a NOP. */
3285 NOREF(GCPtrEffSrc);
3286 IEM_MC_ADVANCE_RIP_AND_FINISH();
3287 IEM_MC_END();
3288 }
3289}
3290
3291
3292/** Opcode 0x0f 0x20. */
3293FNIEMOP_DEF(iemOp_mov_Rd_Cd)
3294{
3295 /* mod is ignored, as are operand size overrides. */
3296/** @todo testcase: check memory encoding. */
3297 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
3298 IEMOP_HLP_MIN_386();
3299 if (IEM_IS_64BIT_CODE(pVCpu))
3300 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3301 else
3302 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3303
3304 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3305 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3306 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3307 {
3308 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3309 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3310 IEMOP_RAISE_INVALID_OPCODE_RET(); /* #UD takes precedence over #GP(), see test. */
3311 iCrReg |= 8;
3312 }
3313 switch (iCrReg)
3314 {
3315 case 0: case 2: case 3: case 4: case 8:
3316 break;
3317 default:
3318 IEMOP_RAISE_INVALID_OPCODE_RET();
3319 }
3320 IEMOP_HLP_DONE_DECODING();
3321
3322 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT,
3323 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
3324 iemCImpl_mov_Rd_Cd, IEM_GET_MODRM_RM(pVCpu, bRm), iCrReg);
3325}
3326
3327
3328/** Opcode 0x0f 0x21. */
3329FNIEMOP_DEF(iemOp_mov_Rd_Dd)
3330{
3331/** @todo testcase: check memory encoding. */
3332 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
3333 IEMOP_HLP_MIN_386();
3334 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3335 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3336 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3337 IEMOP_RAISE_INVALID_OPCODE_RET();
3338 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT,
3339 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
3340 iemCImpl_mov_Rd_Dd, IEM_GET_MODRM_RM(pVCpu, bRm), IEM_GET_MODRM_REG_8(bRm));
3341}
3342
3343
3344/** Opcode 0x0f 0x22. */
3345FNIEMOP_DEF(iemOp_mov_Cd_Rd)
3346{
3347 /* mod is ignored, as are operand size overrides. */
3348 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
3349 IEMOP_HLP_MIN_386();
3350 if (IEM_IS_64BIT_CODE(pVCpu))
3351 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3352 else
3353 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3354
3355 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3356 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3357 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3358 {
3359 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3360 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3361 IEMOP_RAISE_INVALID_OPCODE_RET(); /* #UD takes precedence over #GP(), see test. */
3362 iCrReg |= 8;
3363 }
3364 switch (iCrReg)
3365 {
3366 case 0: case 2: case 3: case 4: case 8:
3367 break;
3368 default:
3369 IEMOP_RAISE_INVALID_OPCODE_RET();
3370 }
3371 IEMOP_HLP_DONE_DECODING();
3372
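    /* iCrReg is 0, 2, 3, 4 or 8 at this point; the (2 | 8) test below
       matches CR2, CR3 and CR8, none of which can change the execution
       mode, so only CR0 and CR4 moves carry IEM_CIMPL_F_MODE. */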
3373 if (iCrReg & (2 | 8))
3374 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, 0,
3375 iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
3376 else
3377 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, 0,
3378 iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
3379}
3380
3381
3382/** Opcode 0x0f 0x23. */
3383FNIEMOP_DEF(iemOp_mov_Dd_Rd)
3384{
3385 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
3386 IEMOP_HLP_MIN_386();
3387 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3388 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3389 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3390 IEMOP_RAISE_INVALID_OPCODE_RET();
3391 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, 0,
3392 iemCImpl_mov_Dd_Rd, IEM_GET_MODRM_REG_8(bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
3393}
3394
3395
3396/** Opcode 0x0f 0x24. */
3397FNIEMOP_DEF(iemOp_mov_Rd_Td)
3398{
3399 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
3400 IEMOP_HLP_MIN_386();
3401 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3402 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3403 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3404 IEMOP_RAISE_INVALID_OPCODE_RET();
3405 IEM_MC_DEFER_TO_CIMPL_2_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
3406 iemCImpl_mov_Rd_Td, IEM_GET_MODRM_RM(pVCpu, bRm), IEM_GET_MODRM_REG_8(bRm));
3407}
3408
3409
3410/** Opcode 0x0f 0x26. */
3411FNIEMOP_DEF(iemOp_mov_Td_Rd)
3412{
3413 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
3414 IEMOP_HLP_MIN_386();
3415 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3416 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3417 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3418 IEMOP_RAISE_INVALID_OPCODE_RET();
3419 IEM_MC_DEFER_TO_CIMPL_2_RET(0, 0, iemCImpl_mov_Td_Rd, IEM_GET_MODRM_REG_8(bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
3420}
3421
3422
3423/**
3424 * @opcode 0x28
3425 * @oppfx none
3426 * @opcpuid sse
3427 * @opgroup og_sse_simdfp_datamove
3428 * @opxcpttype 1
3429 * @optest op1=1 op2=2 -> op1=2
3430 * @optest op1=0 op2=-42 -> op1=-42
3431 */
3432FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
3433{
3434 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3435 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3436 if (IEM_IS_MODRM_REG_MODE(bRm))
3437 {
3438 /*
3439 * Register, register.
3440 */
3441 IEM_MC_BEGIN(0, 0, 0, 0);
3442 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3443 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3444 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3445 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3446 IEM_GET_MODRM_RM(pVCpu, bRm));
3447 IEM_MC_ADVANCE_RIP_AND_FINISH();
3448 IEM_MC_END();
3449 }
3450 else
3451 {
3452 /*
3453 * Register, memory.
3454 */
3455 IEM_MC_BEGIN(0, 2, 0, 0);
3456 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3457 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3458
3459 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3460 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3461 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3462 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3463
3464 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3465 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3466
3467 IEM_MC_ADVANCE_RIP_AND_FINISH();
3468 IEM_MC_END();
3469 }
3470}
3471
3472/**
3473 * @opcode 0x28
3474 * @oppfx 66
3475 * @opcpuid sse2
3476 * @opgroup og_sse2_pcksclr_datamove
3477 * @opxcpttype 1
3478 * @optest op1=1 op2=2 -> op1=2
3479 * @optest op1=0 op2=-42 -> op1=-42
3480 */
3481FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
3482{
3483 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3484 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3485 if (IEM_IS_MODRM_REG_MODE(bRm))
3486 {
3487 /*
3488 * Register, register.
3489 */
3490 IEM_MC_BEGIN(0, 0, 0, 0);
3491 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3492 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3493 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3494 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3495 IEM_GET_MODRM_RM(pVCpu, bRm));
3496 IEM_MC_ADVANCE_RIP_AND_FINISH();
3497 IEM_MC_END();
3498 }
3499 else
3500 {
3501 /*
3502 * Register, memory.
3503 */
3504 IEM_MC_BEGIN(0, 2, 0, 0);
3505 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3506 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3507
3508 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3509 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3510 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3511 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3512
3513 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3514 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3515
3516 IEM_MC_ADVANCE_RIP_AND_FINISH();
3517 IEM_MC_END();
3518 }
3519}
3520
3521/* Opcode 0xf3 0x0f 0x28 - invalid */
3522/* Opcode 0xf2 0x0f 0x28 - invalid */
3523
3524/**
3525 * @opcode 0x29
3526 * @oppfx none
3527 * @opcpuid sse
3528 * @opgroup og_sse_simdfp_datamove
3529 * @opxcpttype 1
3530 * @optest op1=1 op2=2 -> op1=2
3531 * @optest op1=0 op2=-42 -> op1=-42
3532 */
3533FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
3534{
3535 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3536 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3537 if (IEM_IS_MODRM_REG_MODE(bRm))
3538 {
3539 /*
3540 * Register, register.
3541 */
3542 IEM_MC_BEGIN(0, 0, 0, 0);
3543 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3544 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3545 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3546 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3547 IEM_GET_MODRM_REG(pVCpu, bRm));
3548 IEM_MC_ADVANCE_RIP_AND_FINISH();
3549 IEM_MC_END();
3550 }
3551 else
3552 {
3553 /*
3554 * Memory, register.
3555 */
3556 IEM_MC_BEGIN(0, 2, 0, 0);
3557 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3558 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3559
3560 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3561 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3562 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3563 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3564
3565 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3566 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3567
3568 IEM_MC_ADVANCE_RIP_AND_FINISH();
3569 IEM_MC_END();
3570 }
3571}
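
/* Same instruction, opposite direction: for 0x29 the register form copies
 * MODRM.reg into MODRM.rm, and the memory form is a store, so only
 * IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ() is needed -- guest XMM state is
 * consumed here, not modified. */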
3572
3573/**
3574 * @opcode 0x29
3575 * @oppfx 66
3576 * @opcpuid sse2
3577 * @opgroup og_sse2_pcksclr_datamove
3578 * @opxcpttype 1
3579 * @optest op1=1 op2=2 -> op1=2
3580 * @optest op1=0 op2=-42 -> op1=-42
3581 */
3582FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
3583{
3584 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3585 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3586 if (IEM_IS_MODRM_REG_MODE(bRm))
3587 {
3588 /*
3589 * Register, register.
3590 */
3591 IEM_MC_BEGIN(0, 0, 0, 0);
3592 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3593 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3594 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3595 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3596 IEM_GET_MODRM_REG(pVCpu, bRm));
3597 IEM_MC_ADVANCE_RIP_AND_FINISH();
3598 IEM_MC_END();
3599 }
3600 else
3601 {
3602 /*
3603 * Memory, register.
3604 */
3605 IEM_MC_BEGIN(0, 2, 0, 0);
3606 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3607 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3608
3609 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3610 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3611 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3612 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3613
3614 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3615 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3616
3617 IEM_MC_ADVANCE_RIP_AND_FINISH();
3618 IEM_MC_END();
3619 }
3620}
3621
3622/* Opcode 0xf3 0x0f 0x29 - invalid */
3623/* Opcode 0xf2 0x0f 0x29 - invalid */
3624
3625
3626/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
3627FNIEMOP_DEF(iemOp_cvtpi2ps_Vps_Qpi)
3628{
3629 IEMOP_MNEMONIC2(RM, CVTPI2PS, cvtpi2ps, Vps, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3630 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3631 if (IEM_IS_MODRM_REG_MODE(bRm))
3632 {
3633 /*
3634 * XMM, MMX
3635 */
3636 IEM_MC_BEGIN(3, 1, 0, 0);
3637 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3638 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3639 IEM_MC_LOCAL(X86XMMREG, Dst);
3640 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3641 IEM_MC_ARG(uint64_t, u64Src, 2);
3642 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3643 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3644 IEM_MC_PREPARE_FPU_USAGE();
3645 IEM_MC_FPU_TO_MMX_MODE();
3646
3647 IEM_MC_REF_MXCSR(pfMxcsr);
3648 IEM_MC_FETCH_XREG_XMM(Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* Need it because the high quadword remains unchanged. */
3649 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
3650
3651 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2ps_u128, pfMxcsr, pDst, u64Src);
3652 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3653 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3654 } IEM_MC_ELSE() {
3655 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3656 } IEM_MC_ENDIF();
3657
3658 IEM_MC_ADVANCE_RIP_AND_FINISH();
3659 IEM_MC_END();
3660 }
3661 else
3662 {
3663 /*
3664 * XMM, [mem64]
3665 */
3666 IEM_MC_BEGIN(3, 2, 0, 0);
3667 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3668 IEM_MC_LOCAL(X86XMMREG, Dst);
3669 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3670 IEM_MC_ARG(uint64_t, u64Src, 2);
3671 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3672
3673 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3674 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3675 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3676 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3677 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3678
3679 IEM_MC_PREPARE_FPU_USAGE();
3680 IEM_MC_FPU_TO_MMX_MODE();
3681 IEM_MC_REF_MXCSR(pfMxcsr);
3682
3683 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2ps_u128, pfMxcsr, pDst, u64Src);
3684 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3685 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3686 } IEM_MC_ELSE() {
3687 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3688 } IEM_MC_ENDIF();
3689
3690 IEM_MC_ADVANCE_RIP_AND_FINISH();
3691 IEM_MC_END();
3692 }
3693}
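
/* The IEM_MC_IF_MXCSR_XCPT_PENDING() pattern above recurs throughout this
 * file: the worker sets exception flags in MXCSR, and if any set flag is
 * unmasked the result is not committed; IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT()
 * then raises \#XF, or \#UD when CR4.OSXMMEXCPT is clear. */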
3694
3695
3696/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
3697FNIEMOP_DEF(iemOp_cvtpi2pd_Vpd_Qpi)
3698{
3699 IEMOP_MNEMONIC2(RM, CVTPI2PD, cvtpi2pd, Vpd, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3700 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3701 if (IEM_IS_MODRM_REG_MODE(bRm))
3702 {
3703 /*
3704 * XMM, MMX
3705 */
3706 IEM_MC_BEGIN(3, 1, 0, 0);
3707 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3708 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3709 IEM_MC_LOCAL(X86XMMREG, Dst);
3710 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3711 IEM_MC_ARG(uint64_t, u64Src, 2);
3712 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3713 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3714 IEM_MC_PREPARE_FPU_USAGE();
3715 IEM_MC_FPU_TO_MMX_MODE();
3716
3717 IEM_MC_REF_MXCSR(pfMxcsr);
3718 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
3719
3720 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2pd_u128, pfMxcsr, pDst, u64Src);
3721 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3722 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3723 } IEM_MC_ELSE() {
3724 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3725 } IEM_MC_ENDIF();
3726
3727 IEM_MC_ADVANCE_RIP_AND_FINISH();
3728 IEM_MC_END();
3729 }
3730 else
3731 {
3732 /*
3733 * XMM, [mem64]
3734 */
3735 IEM_MC_BEGIN(3, 3, 0, 0);
3736 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3737 IEM_MC_LOCAL(X86XMMREG, Dst);
3738 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3739 IEM_MC_ARG(uint64_t, u64Src, 2);
3740 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3741
3742 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3743 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3744 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3745 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3746 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3747
3748 /* Doesn't cause a transition to MMX mode. */
3749 IEM_MC_PREPARE_SSE_USAGE();
3750 IEM_MC_REF_MXCSR(pfMxcsr);
3751
3752 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2pd_u128, pfMxcsr, pDst, u64Src);
3753 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3754 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3755 } IEM_MC_ELSE() {
3756 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3757 } IEM_MC_ENDIF();
3758
3759 IEM_MC_ADVANCE_RIP_AND_FINISH();
3760 IEM_MC_END();
3761 }
3762}
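
/* Note the asymmetry between the two converters above: cvtpi2ps enters MMX
 * mode in both encodings, while cvtpi2pd does so only for the register
 * form -- with a memory source no MMX register is touched, so the x87/MMX
 * state is left alone (hence the IEM_MC_PREPARE_SSE_USAGE() path). */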
3763
3764
3765/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
3766FNIEMOP_DEF(iemOp_cvtsi2ss_Vss_Ey)
3767{
3768 IEMOP_MNEMONIC2(RM, CVTSI2SS, cvtsi2ss, Vss, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
3769
3770 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3771 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3772 {
3773 if (IEM_IS_MODRM_REG_MODE(bRm))
3774 {
3775 /* XMM, greg64 */
3776 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
3777 IEM_MC_LOCAL(uint32_t, fMxcsr);
3778 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3779 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3780 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3781 IEM_MC_ARG(const int64_t *, pi64Src, 2);
3782
3783 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3784 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3785 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3786
3787 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3788 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i64, pfMxcsr, pr32Dst, pi64Src);
3789 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3790 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3791 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3792 } IEM_MC_ELSE() {
3793 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3794 } IEM_MC_ENDIF();
3795
3796 IEM_MC_ADVANCE_RIP_AND_FINISH();
3797 IEM_MC_END();
3798 }
3799 else
3800 {
3801 /* XMM, [mem64] */
3802 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0);
3803 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3804 IEM_MC_LOCAL(uint32_t, fMxcsr);
3805 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3806 IEM_MC_LOCAL(int64_t, i64Src);
3807 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3808 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3809 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 2);
3810
3811 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3812 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3813 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3814 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3815
3816 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3817 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i64, pfMxcsr, pr32Dst, pi64Src);
3818 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3819 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3820 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3821 } IEM_MC_ELSE() {
3822 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3823 } IEM_MC_ENDIF();
3824
3825 IEM_MC_ADVANCE_RIP_AND_FINISH();
3826 IEM_MC_END();
3827 }
3828 }
3829 else
3830 {
3831 if (IEM_IS_MODRM_REG_MODE(bRm))
3832 {
3833 /* XMM, greg32 */
3834 IEM_MC_BEGIN(3, 2, 0, 0);
3835 IEM_MC_LOCAL(uint32_t, fMxcsr);
3836 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3837 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3838 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3839 IEM_MC_ARG(const int32_t *, pi32Src, 2);
3840
3841 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3842 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3843 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3844
3845 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3846 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i32, pfMxcsr, pr32Dst, pi32Src);
3847 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3848 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3849 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3850 } IEM_MC_ELSE() {
3851 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3852 } IEM_MC_ENDIF();
3853
3854 IEM_MC_ADVANCE_RIP_AND_FINISH();
3855 IEM_MC_END();
3856 }
3857 else
3858 {
3859 /* XMM, [mem32] */
3860 IEM_MC_BEGIN(3, 4, 0, 0);
3861 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3862 IEM_MC_LOCAL(uint32_t, fMxcsr);
3863 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3864 IEM_MC_LOCAL(int32_t, i32Src);
3865 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3866 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3867 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 2);
3868
3869 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3870 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3871 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3872 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3873
3874 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3875 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i32, pfMxcsr, pr32Dst, pi32Src);
3876 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3877 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3878 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3879 } IEM_MC_ELSE() {
3880 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3881 } IEM_MC_ENDIF();
3882
3883 IEM_MC_ADVANCE_RIP_AND_FINISH();
3884 IEM_MC_END();
3885 }
3886 }
3887}
3888
3889
3890/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
3891FNIEMOP_DEF(iemOp_cvtsi2sd_Vsd_Ey)
3892{
3893 IEMOP_MNEMONIC2(RM, CVTSI2SD, cvtsi2sd, Vsd, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
3894
3895 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3896 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3897 {
3898 if (IEM_IS_MODRM_REG_MODE(bRm))
3899 {
3900 /* XMM, greg64 */
3901 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
3902 IEM_MC_LOCAL(uint32_t, fMxcsr);
3903 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3904 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3905 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3906 IEM_MC_ARG(const int64_t *, pi64Src, 2);
3907
3908 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3909 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3910 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3911
3912 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3913 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i64, pfMxcsr, pr64Dst, pi64Src);
3914 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3915 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3916 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3917 } IEM_MC_ELSE() {
3918 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3919 } IEM_MC_ENDIF();
3920
3921 IEM_MC_ADVANCE_RIP_AND_FINISH();
3922 IEM_MC_END();
3923 }
3924 else
3925 {
3926 /* XMM, [mem64] */
3927 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0);
3928 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3929 IEM_MC_LOCAL(uint32_t, fMxcsr);
3930 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3931 IEM_MC_LOCAL(int64_t, i64Src);
3932 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3933 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3934 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 2);
3935
3936 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3937 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3938 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3939 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3940
3941 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3942 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i64, pfMxcsr, pr64Dst, pi64Src);
3943 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3944 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3945 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3946 } IEM_MC_ELSE() {
3947 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3948 } IEM_MC_ENDIF();
3949
3950 IEM_MC_ADVANCE_RIP_AND_FINISH();
3951 IEM_MC_END();
3952 }
3953 }
3954 else
3955 {
3956 if (IEM_IS_MODRM_REG_MODE(bRm))
3957 {
3958 /* XMM, greg32 */
3959 IEM_MC_BEGIN(3, 2, 0, 0);
3960 IEM_MC_LOCAL(uint32_t, fMxcsr);
3961 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3962 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3963 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3964 IEM_MC_ARG(const int32_t *, pi32Src, 2);
3965
3966 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3967 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3968 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3969
3970 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3971 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i32, pfMxcsr, pr64Dst, pi32Src);
3972 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3973 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3974 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3975 } IEM_MC_ELSE() {
3976 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3977 } IEM_MC_ENDIF();
3978
3979 IEM_MC_ADVANCE_RIP_AND_FINISH();
3980 IEM_MC_END();
3981 }
3982 else
3983 {
3984 /* XMM, [mem32] */
3985 IEM_MC_BEGIN(3, 4, 0, 0);
3986 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3987 IEM_MC_LOCAL(uint32_t, fMxcsr);
3988 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3989 IEM_MC_LOCAL(int32_t, i32Src);
3990 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3991 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3992 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 2);
3993
3994 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3995 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3996 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3997 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3998
3999 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4000 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i32, pfMxcsr, pr64Dst, pi32Src);
4001 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4002 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4003 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4004 } IEM_MC_ELSE() {
4005 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
4006 } IEM_MC_ENDIF();
4007
4008 IEM_MC_ADVANCE_RIP_AND_FINISH();
4009 IEM_MC_END();
4010 }
4011 }
4012}
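
/* For both cvtsi2ss and cvtsi2sd the IEM_OP_PRF_SIZE_REX_W check picks the
 * integer source width: 32-bit without REX.W, 64-bit with it. Only the low
 * scalar of the destination XMM register is written (32 bits for cvtsi2ss,
 * 64 for cvtsi2sd) and the remaining bytes are preserved, which is why the
 * result goes out via IEM_MC_STORE_XREG_R32/R64 rather than a full 128-bit
 * store. */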
4013
4014
4015/**
4016 * @opcode 0x2b
4017 * @opcodesub !11 mr/reg
4018 * @oppfx none
4019 * @opcpuid sse
4020 * @opgroup og_sse1_cachect
4021 * @opxcpttype 1
4022 * @optest op1=1 op2=2 -> op1=2
4023 * @optest op1=0 op2=-42 -> op1=-42
4024 */
4025FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
4026{
4027 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4028 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4029 if (IEM_IS_MODRM_MEM_MODE(bRm))
4030 {
4031 /*
4032 * Memory, register.
4033 */
4034 IEM_MC_BEGIN(0, 2, 0, 0);
4035 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
4036 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4037
4038 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4039 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4040 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4041 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4042
4043 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
4044 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4045
4046 IEM_MC_ADVANCE_RIP_AND_FINISH();
4047 IEM_MC_END();
4048 }
4049 /* The register, register encoding is invalid. */
4050 else
4051 IEMOP_RAISE_INVALID_OPCODE_RET();
4052}
4053
4054/**
4055 * @opcode 0x2b
4056 * @opcodesub !11 mr/reg
4057 * @oppfx 0x66
4058 * @opcpuid sse2
4059 * @opgroup og_sse2_cachect
4060 * @opxcpttype 1
4061 * @optest op1=1 op2=2 -> op1=2
4062 * @optest op1=0 op2=-42 -> op1=-42
4063 */
4064FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
4065{
4066 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4067 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4068 if (IEM_IS_MODRM_MEM_MODE(bRm))
4069 {
4070 /*
4071 * memory, register.
4072 */
4073 IEM_MC_BEGIN(0, 2, 0, 0);
4074 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
4075 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4076
4077 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4078 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4079 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4080 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
4081
4082 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
4083 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4084
4085 IEM_MC_ADVANCE_RIP_AND_FINISH();
4086 IEM_MC_END();
4087 }
4088 /* The register, register encoding is invalid. */
4089 else
4090 IEMOP_RAISE_INVALID_OPCODE_RET();
4091}
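
/* The non-temporal hint carried by movntps/movntpd has no architectural
 * side effect, so under emulation they behave as ordinary aligned 128-bit
 * stores (misalignment still raises \#GP(0)); the hint only matters to the
 * caches of real hardware. As noted above, the register encodings are
 * invalid. */
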
4092/* Opcode 0xf3 0x0f 0x2b - invalid */
4093/* Opcode 0xf2 0x0f 0x2b - invalid */
4094
4095
4096/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
4097FNIEMOP_DEF(iemOp_cvttps2pi_Ppi_Wps)
4098{
4099 IEMOP_MNEMONIC2(RM, CVTTPS2PI, cvttps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4100 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4101 if (IEM_IS_MODRM_REG_MODE(bRm))
4102 {
4103 /*
4104 * Register, register.
4105 */
4106 IEM_MC_BEGIN(3, 1, 0, 0);
4107 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4108 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4109 IEM_MC_LOCAL(uint64_t, u64Dst);
4110 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4111 IEM_MC_ARG(uint64_t, u64Src, 2);
4112 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4113 IEM_MC_PREPARE_FPU_USAGE();
4114 IEM_MC_FPU_TO_MMX_MODE();
4115
4116 IEM_MC_REF_MXCSR(pfMxcsr);
4117 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword */);
4118
4119 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4120 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4121 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4122 } IEM_MC_ELSE() {
4123 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4124 } IEM_MC_ENDIF();
4125
4126 IEM_MC_ADVANCE_RIP_AND_FINISH();
4127 IEM_MC_END();
4128 }
4129 else
4130 {
4131 /*
4132 * Register, memory.
4133 */
4134 IEM_MC_BEGIN(3, 2, 0, 0);
4135 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4136 IEM_MC_LOCAL(uint64_t, u64Dst);
4137 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4138 IEM_MC_ARG(uint64_t, u64Src, 2);
4139 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4140
4141 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4142 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4143 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4144 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4145
4146 IEM_MC_PREPARE_FPU_USAGE();
4147 IEM_MC_FPU_TO_MMX_MODE();
4148 IEM_MC_REF_MXCSR(pfMxcsr);
4149
4150 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4151 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4152 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4153 } IEM_MC_ELSE() {
4154 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4155 } IEM_MC_ENDIF();
4156
4157 IEM_MC_ADVANCE_RIP_AND_FINISH();
4158 IEM_MC_END();
4159 }
4160}
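
/* The 0x2c opcodes are the truncating ('cvtt') converters: they round
 * toward zero regardless of MXCSR.RC, and an out-of-range or NaN source
 * yields the integer indefinite (0x80000000 resp. 0x8000000000000000) and
 * sets \#I in MXCSR -- hence the MXCSR-pending check before the result is
 * committed. */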
4161
4162
4163/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
4164FNIEMOP_DEF(iemOp_cvttpd2pi_Ppi_Wpd)
4165{
4166 IEMOP_MNEMONIC2(RM, CVTTPD2PI, cvttpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4167 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4168 if (IEM_IS_MODRM_REG_MODE(bRm))
4169 {
4170 /*
4171 * Register, register.
4172 */
4173 IEM_MC_BEGIN(3, 1, 0, 0);
4174 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4175 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4176 IEM_MC_LOCAL(uint64_t, u64Dst);
4177 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4178 IEM_MC_ARG(PCX86XMMREG, pSrc, 2);
4179 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4180 IEM_MC_PREPARE_FPU_USAGE();
4181 IEM_MC_FPU_TO_MMX_MODE();
4182
4183 IEM_MC_REF_MXCSR(pfMxcsr);
4184 IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4185
4186 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4187 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4188 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4189 } IEM_MC_ELSE() {
4190 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4191 } IEM_MC_ENDIF();
4192
4193 IEM_MC_ADVANCE_RIP_AND_FINISH();
4194 IEM_MC_END();
4195 }
4196 else
4197 {
4198 /*
4199 * Register, memory.
4200 */
4201 IEM_MC_BEGIN(3, 3, 0, 0);
4202 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4203 IEM_MC_LOCAL(uint64_t, u64Dst);
4204 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4205 IEM_MC_LOCAL(X86XMMREG, uSrc);
4206 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 2);
4207 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4208
4209 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4210 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4211 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4212 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4213
4214 IEM_MC_PREPARE_FPU_USAGE();
4215 IEM_MC_FPU_TO_MMX_MODE();
4216
4217 IEM_MC_REF_MXCSR(pfMxcsr);
4218
4219 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4220 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4221 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4222 } IEM_MC_ELSE() {
4223 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4224 } IEM_MC_ENDIF();
4225
4226 IEM_MC_ADVANCE_RIP_AND_FINISH();
4227 IEM_MC_END();
4228 }
4229}
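
/* Unlike the 'ps' variant above, the packed-double source is passed by
 * reference as a whole X86XMMREG -- two doubles need all 128 bits, whereas
 * the two packed singles of cvttps2pi fit in a single uint64_t fetch. */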
4230
4231
4232/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
4233FNIEMOP_DEF(iemOp_cvttss2si_Gy_Wss)
4234{
4235 IEMOP_MNEMONIC2(RM, CVTTSS2SI, cvttss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4236
4237 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4238 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4239 {
4240 if (IEM_IS_MODRM_REG_MODE(bRm))
4241 {
4242 /* greg64, XMM */
4243 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
4244 IEM_MC_LOCAL(uint32_t, fMxcsr);
4245 IEM_MC_LOCAL(int64_t, i64Dst);
4246 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4247 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4248 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4249
4250 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4251 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4252 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4253
4254 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4255 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4256 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4257 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4258 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4259 } IEM_MC_ELSE() {
4260 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4261 } IEM_MC_ENDIF();
4262
4263 IEM_MC_ADVANCE_RIP_AND_FINISH();
4264 IEM_MC_END();
4265 }
4266 else
4267 {
4268 /* greg64, [mem32] */
4269 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0);
4270 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4271 IEM_MC_LOCAL(uint32_t, fMxcsr);
4272 IEM_MC_LOCAL(int64_t, i64Dst);
4273 IEM_MC_LOCAL(uint32_t, u32Src);
4274 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4275 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4276 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4277
4278 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4279 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4280 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4281 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4282
4283 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4284 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4285 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4286 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4287 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4288 } IEM_MC_ELSE() {
4289 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4290 } IEM_MC_ENDIF();
4291
4292 IEM_MC_ADVANCE_RIP_AND_FINISH();
4293 IEM_MC_END();
4294 }
4295 }
4296 else
4297 {
4298 if (IEM_IS_MODRM_REG_MODE(bRm))
4299 {
4300 /* greg32, XMM */
4301 IEM_MC_BEGIN(3, 2, 0, 0);
4302 IEM_MC_LOCAL(uint32_t, fMxcsr);
4303 IEM_MC_LOCAL(int32_t, i32Dst);
4304 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4305 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4306 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4307
4308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4309 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4310 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4311
4312 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4313 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4314 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4315 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4316 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4317 } IEM_MC_ELSE() {
4318 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4319 } IEM_MC_ENDIF();
4320
4321 IEM_MC_ADVANCE_RIP_AND_FINISH();
4322 IEM_MC_END();
4323 }
4324 else
4325 {
4326 /* greg32, [mem32] */
4327 IEM_MC_BEGIN(3, 4, 0, 0);
4328 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4329 IEM_MC_LOCAL(uint32_t, fMxcsr);
4330 IEM_MC_LOCAL(int32_t, i32Dst);
4331 IEM_MC_LOCAL(uint32_t, u32Src);
4332 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4333 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4334 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4335
4336 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4337 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4338 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4339 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4340
4341 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4342 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4343 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4344 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4345 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4346 } IEM_MC_ELSE() {
4347 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4348 } IEM_MC_ENDIF();
4349
4350 IEM_MC_ADVANCE_RIP_AND_FINISH();
4351 IEM_MC_END();
4352 }
4353 }
4354}
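
/* In the 32-bit destination forms, IEM_MC_STORE_GREG_U32 zero extends into
 * the upper half of the 64-bit register, matching the architectural
 * behaviour of 32-bit GPR writes in long mode. */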
4355
4356
4357/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
4358FNIEMOP_DEF(iemOp_cvttsd2si_Gy_Wsd)
4359{
4360 IEMOP_MNEMONIC2(RM, CVTTSD2SI, cvttsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4361
4362 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4363 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4364 {
4365 if (IEM_IS_MODRM_REG_MODE(bRm))
4366 {
4367 /* greg64, XMM */
4368 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
4369 IEM_MC_LOCAL(uint32_t, fMxcsr);
4370 IEM_MC_LOCAL(int64_t, i64Dst);
4371 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4372 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4373 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4374
4375 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4376 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4377 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4378
4379 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4380 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4381 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4382 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4383 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4384 } IEM_MC_ELSE() {
4385 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4386 } IEM_MC_ENDIF();
4387
4388 IEM_MC_ADVANCE_RIP_AND_FINISH();
4389 IEM_MC_END();
4390 }
4391 else
4392 {
4393 /* greg64, [mem64] */
4394 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0);
4395 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4396 IEM_MC_LOCAL(uint32_t, fMxcsr);
4397 IEM_MC_LOCAL(int64_t, i64Dst);
4398 IEM_MC_LOCAL(uint64_t, u64Src);
4399 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4400 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4401 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4402
4403 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4404 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4405 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4406 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4407
4408 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4409 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4410 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4411 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4412 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4413 } IEM_MC_ELSE() {
4414 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4415 } IEM_MC_ENDIF();
4416
4417 IEM_MC_ADVANCE_RIP_AND_FINISH();
4418 IEM_MC_END();
4419 }
4420 }
4421 else
4422 {
4423 if (IEM_IS_MODRM_REG_MODE(bRm))
4424 {
4425 /* greg, XMM */
4426 IEM_MC_BEGIN(3, 2, 0, 0);
4427 IEM_MC_LOCAL(uint32_t, fMxcsr);
4428 IEM_MC_LOCAL(int32_t, i32Dst);
4429 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4430 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4431 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4432
4433 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4434 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4435 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4436
4437 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4438 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4439 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4440 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4441 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4442 } IEM_MC_ELSE() {
4443 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4444 } IEM_MC_ENDIF();
4445
4446 IEM_MC_ADVANCE_RIP_AND_FINISH();
4447 IEM_MC_END();
4448 }
4449 else
4450 {
4451 /* greg32, [mem32] */
4452 IEM_MC_BEGIN(3, 4, 0, 0);
4453 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4454 IEM_MC_LOCAL(uint32_t, fMxcsr);
4455 IEM_MC_LOCAL(int32_t, i32Dst);
4456 IEM_MC_LOCAL(uint64_t, u64Src);
4457 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4458 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4459 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4460
4461 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4462 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4463 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4464 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4465
4466 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4467 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4468 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4469 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4470 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4471 } IEM_MC_ELSE() {
4472 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4473 } IEM_MC_ENDIF();
4474
4475 IEM_MC_ADVANCE_RIP_AND_FINISH();
4476 IEM_MC_END();
4477 }
4478 }
4479}
4480
4481
4482/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
4483FNIEMOP_DEF(iemOp_cvtps2pi_Ppi_Wps)
4484{
4485 IEMOP_MNEMONIC2(RM, CVTPS2PI, cvtps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4486 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4487 if (IEM_IS_MODRM_REG_MODE(bRm))
4488 {
4489 /*
4490 * Register, register.
4491 */
4492 IEM_MC_BEGIN(3, 1, 0, 0);
4493 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4494 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4495 IEM_MC_LOCAL(uint64_t, u64Dst);
4496 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4497 IEM_MC_ARG(uint64_t, u64Src, 2);
4498
4499 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4500 IEM_MC_PREPARE_FPU_USAGE();
4501 IEM_MC_FPU_TO_MMX_MODE();
4502
4503 IEM_MC_REF_MXCSR(pfMxcsr);
4504 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword */);
4505
4506 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4507 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4508 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4509 } IEM_MC_ELSE() {
4510 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4511 } IEM_MC_ENDIF();
4512
4513 IEM_MC_ADVANCE_RIP_AND_FINISH();
4514 IEM_MC_END();
4515 }
4516 else
4517 {
4518 /*
4519 * Register, memory.
4520 */
4521 IEM_MC_BEGIN(3, 2, 0, 0);
4522 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4523 IEM_MC_LOCAL(uint64_t, u64Dst);
4524 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4525 IEM_MC_ARG(uint64_t, u64Src, 2);
4526 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4527
4528 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4529 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4530 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4531 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4532
4533 IEM_MC_PREPARE_FPU_USAGE();
4534 IEM_MC_FPU_TO_MMX_MODE();
4535 IEM_MC_REF_MXCSR(pfMxcsr);
4536
4537 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4538 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4539 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4540 } IEM_MC_ELSE() {
4541 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4542 } IEM_MC_ENDIF();
4543
4544 IEM_MC_ADVANCE_RIP_AND_FINISH();
4545 IEM_MC_END();
4546 }
4547}
4548
4549
4550/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Ppi, Wpd */
4551FNIEMOP_DEF(iemOp_cvtpd2pi_Qpi_Wpd)
4552{
4553 IEMOP_MNEMONIC2(RM, CVTPD2PI, cvtpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4554 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4555 if (IEM_IS_MODRM_REG_MODE(bRm))
4556 {
4557 /*
4558 * Register, register.
4559 */
4560 IEM_MC_BEGIN(3, 1, 0, 0);
4561 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4562 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4563 IEM_MC_LOCAL(uint64_t, u64Dst);
4564 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4565 IEM_MC_ARG(PCX86XMMREG, pSrc, 2);
4566
4567 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4568 IEM_MC_PREPARE_FPU_USAGE();
4569 IEM_MC_FPU_TO_MMX_MODE();
4570
4571 IEM_MC_REF_MXCSR(pfMxcsr);
4572 IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4573
4574 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4575 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4576 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4577 } IEM_MC_ELSE() {
4578 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4579 } IEM_MC_ENDIF();
4580
4581 IEM_MC_ADVANCE_RIP_AND_FINISH();
4582 IEM_MC_END();
4583 }
4584 else
4585 {
4586 /*
4587 * Register, memory.
4588 */
4589 IEM_MC_BEGIN(3, 3, 0, 0);
4590 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4591 IEM_MC_LOCAL(uint64_t, u64Dst);
4592 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4593 IEM_MC_LOCAL(X86XMMREG, uSrc);
4594 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 2);
4595 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4596
4597 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4598 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4599 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4600 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4601
4602 IEM_MC_PREPARE_FPU_USAGE();
4603 IEM_MC_FPU_TO_MMX_MODE();
4604
4605 IEM_MC_REF_MXCSR(pfMxcsr);
4606
4607 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4608 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4609 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4610 } IEM_MC_ELSE() {
4611 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4612 } IEM_MC_ENDIF();
4613
4614 IEM_MC_ADVANCE_RIP_AND_FINISH();
4615 IEM_MC_END();
4616 }
4617}
4618
4619
4620/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
4621FNIEMOP_DEF(iemOp_cvtss2si_Gy_Wss)
4622{
4623 IEMOP_MNEMONIC2(RM, CVTSS2SI, cvtss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4624
4625 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4626 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4627 {
4628 if (IEM_IS_MODRM_REG_MODE(bRm))
4629 {
4630 /* greg64, XMM */
4631 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
4632 IEM_MC_LOCAL(uint32_t, fMxcsr);
4633 IEM_MC_LOCAL(int64_t, i64Dst);
4634 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4635 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4636 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4637
4638 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4639 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4640 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4641
4642 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4643 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4644 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4645 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4646 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4647 } IEM_MC_ELSE() {
4648 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4649 } IEM_MC_ENDIF();
4650
4651 IEM_MC_ADVANCE_RIP_AND_FINISH();
4652 IEM_MC_END();
4653 }
4654 else
4655 {
4656 /* greg64, [mem32] */
4657 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0);
4658 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4659 IEM_MC_LOCAL(uint32_t, fMxcsr);
4660 IEM_MC_LOCAL(int64_t, i64Dst);
4661 IEM_MC_LOCAL(uint32_t, u32Src);
4662 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4663 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4664 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4665
4666 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4667 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4668 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4669 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4670
4671 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4672 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4673 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4674 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4675 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4676 } IEM_MC_ELSE() {
4677 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4678 } IEM_MC_ENDIF();
4679
4680 IEM_MC_ADVANCE_RIP_AND_FINISH();
4681 IEM_MC_END();
4682 }
4683 }
4684 else
4685 {
4686 if (IEM_IS_MODRM_REG_MODE(bRm))
4687 {
4688 /* greg32, XMM */
4689 IEM_MC_BEGIN(3, 2, 0, 0);
4690 IEM_MC_LOCAL(uint32_t, fMxcsr);
4691 IEM_MC_LOCAL(int32_t, i32Dst);
4692 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4693 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4694 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4695
4696 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4697 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4698 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4699
4700 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4701 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4702 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4703 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4704 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4705 } IEM_MC_ELSE() {
4706 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4707 } IEM_MC_ENDIF();
4708
4709 IEM_MC_ADVANCE_RIP_AND_FINISH();
4710 IEM_MC_END();
4711 }
4712 else
4713 {
4714 /* greg32, [mem32] */
4715 IEM_MC_BEGIN(3, 4, 0, 0);
4716 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4717 IEM_MC_LOCAL(uint32_t, fMxcsr);
4718 IEM_MC_LOCAL(int32_t, i32Dst);
4719 IEM_MC_LOCAL(uint32_t, u32Src);
4720 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4721 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4722 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4723
4724 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4725 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4726 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4727 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4728
4729 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4730 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4731 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4732 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4733 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4734 } IEM_MC_ELSE() {
4735 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4736 } IEM_MC_ENDIF();
4737
4738 IEM_MC_ADVANCE_RIP_AND_FINISH();
4739 IEM_MC_END();
4740 }
4741 }
4742}
4743
4744
4745/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
4746FNIEMOP_DEF(iemOp_cvtsd2si_Gy_Wsd)
4747{
4748 IEMOP_MNEMONIC2(RM, CVTSD2SI, cvtsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4749
4750 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4751 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4752 {
4753 if (IEM_IS_MODRM_REG_MODE(bRm))
4754 {
4755 /* greg64, XMM */
4756 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
4757 IEM_MC_LOCAL(uint32_t, fMxcsr);
4758 IEM_MC_LOCAL(int64_t, i64Dst);
4759 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4760 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4761 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4762
4763 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4764 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4765 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4766
4767 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4768 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4769 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4770 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4771 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4772 } IEM_MC_ELSE() {
4773 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4774 } IEM_MC_ENDIF();
4775
4776 IEM_MC_ADVANCE_RIP_AND_FINISH();
4777 IEM_MC_END();
4778 }
4779 else
4780 {
4781 /* greg64, [mem64] */
4782 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0);
4783 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4784 IEM_MC_LOCAL(uint32_t, fMxcsr);
4785 IEM_MC_LOCAL(int64_t, i64Dst);
4786 IEM_MC_LOCAL(uint64_t, u64Src);
4787 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4788 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4789 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4790
4791 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4792 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4793 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4794 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4795
4796 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4797 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4798 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4799 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4800 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4801 } IEM_MC_ELSE() {
4802 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4803 } IEM_MC_ENDIF();
4804
4805 IEM_MC_ADVANCE_RIP_AND_FINISH();
4806 IEM_MC_END();
4807 }
4808 }
4809 else
4810 {
4811 if (IEM_IS_MODRM_REG_MODE(bRm))
4812 {
4813 /* greg32, XMM */
4814 IEM_MC_BEGIN(3, 2, 0, 0);
4815 IEM_MC_LOCAL(uint32_t, fMxcsr);
4816 IEM_MC_LOCAL(int32_t, i32Dst);
4817 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4818 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4819 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4820
4821 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4822 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4823 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4824
4825 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4826 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4827 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4828 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4829 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4830 } IEM_MC_ELSE() {
4831 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4832 } IEM_MC_ENDIF();
4833
4834 IEM_MC_ADVANCE_RIP_AND_FINISH();
4835 IEM_MC_END();
4836 }
4837 else
4838 {
4839 /* greg32, [mem64] */
4840 IEM_MC_BEGIN(3, 4, 0, 0);
4841 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4842 IEM_MC_LOCAL(uint32_t, fMxcsr);
4843 IEM_MC_LOCAL(int32_t, i32Dst);
4844 IEM_MC_LOCAL(uint64_t, u64Src);
4845 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4846 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4847 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4848
4849 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4850 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4851 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4852 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4853
4854 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4855 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4856 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4857 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4858 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4859 } IEM_MC_ELSE() {
4860 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4861 } IEM_MC_ENDIF();
4862
4863 IEM_MC_ADVANCE_RIP_AND_FINISH();
4864 IEM_MC_END();
4865 }
4866 }
4867}
4868
4869
4870/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
4871FNIEMOP_DEF(iemOp_ucomiss_Vss_Wss)
4872{
4873 IEMOP_MNEMONIC2(RM, UCOMISS, ucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4874 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4875 if (IEM_IS_MODRM_REG_MODE(bRm))
4876 {
4877 /*
4878 * Register, register.
4879 */
4880 IEM_MC_BEGIN(4, 1, 0, 0);
4881 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4882 IEM_MC_LOCAL(uint32_t, fEFlags);
4883 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4884 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4885 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4886 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
4887 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4888 IEM_MC_PREPARE_SSE_USAGE();
4889 IEM_MC_FETCH_EFLAGS(fEFlags);
4890 IEM_MC_REF_MXCSR(pfMxcsr);
4891 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4892 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
4893 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4894 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4895 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4896 } IEM_MC_ELSE() {
4897 IEM_MC_COMMIT_EFLAGS(fEFlags);
4898 } IEM_MC_ENDIF();
4899
4900 IEM_MC_ADVANCE_RIP_AND_FINISH();
4901 IEM_MC_END();
4902 }
4903 else
4904 {
4905 /*
4906 * Register, memory.
4907 */
4908 IEM_MC_BEGIN(4, 3, 0, 0);
4909 IEM_MC_LOCAL(uint32_t, fEFlags);
4910 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4911 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4912 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4913 IEM_MC_LOCAL(X86XMMREG, uSrc2);
4914 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
4915 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4916
4917 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4918 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4919 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4920 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4921
4922 IEM_MC_PREPARE_SSE_USAGE();
4923 IEM_MC_FETCH_EFLAGS(fEFlags);
4924 IEM_MC_REF_MXCSR(pfMxcsr);
4925 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4926 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4927 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4928 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4929 } IEM_MC_ELSE() {
4930 IEM_MC_COMMIT_EFLAGS(fEFlags);
4931 } IEM_MC_ENDIF();
4932
4933 IEM_MC_ADVANCE_RIP_AND_FINISH();
4934 IEM_MC_END();
4935 }
4936}
4937
4938
4939/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
4940FNIEMOP_DEF(iemOp_ucomisd_Vsd_Wsd)
4941{
4942 IEMOP_MNEMONIC2(RM, UCOMISD, ucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4943 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4944 if (IEM_IS_MODRM_REG_MODE(bRm))
4945 {
4946 /*
4947 * Register, register.
4948 */
4949 IEM_MC_BEGIN(4, 1, 0, 0);
4950 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4951 IEM_MC_LOCAL(uint32_t, fEFlags);
4952 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4953 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4954 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4955 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
4956 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4957 IEM_MC_PREPARE_SSE_USAGE();
4958 IEM_MC_FETCH_EFLAGS(fEFlags);
4959 IEM_MC_REF_MXCSR(pfMxcsr);
4960 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4961 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
4962 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4963 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4964 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4965 } IEM_MC_ELSE() {
4966 IEM_MC_COMMIT_EFLAGS(fEFlags);
4967 } IEM_MC_ENDIF();
4968
4969 IEM_MC_ADVANCE_RIP_AND_FINISH();
4970 IEM_MC_END();
4971 }
4972 else
4973 {
4974 /*
4975 * Register, memory.
4976 */
4977 IEM_MC_BEGIN(4, 3, 0, 0);
4978 IEM_MC_LOCAL(uint32_t, fEFlags);
4979 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4980 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4981 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4982 IEM_MC_LOCAL(X86XMMREG, uSrc2);
4983 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
4984 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4985
4986 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4987 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4988 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4989 IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4990
4991 IEM_MC_PREPARE_SSE_USAGE();
4992 IEM_MC_FETCH_EFLAGS(fEFlags);
4993 IEM_MC_REF_MXCSR(pfMxcsr);
4994 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4995 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4996 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4997 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4998 } IEM_MC_ELSE() {
4999 IEM_MC_COMMIT_EFLAGS(fEFlags);
5000 } IEM_MC_ENDIF();
5001
5002 IEM_MC_ADVANCE_RIP_AND_FINISH();
5003 IEM_MC_END();
5004 }
5005}
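
/* ucomiss/ucomisd perform an unordered compare: ZF/PF/CF receive the result
 * (PF=1 for unordered), OF/SF/AF are cleared, and \#I is signalled only for
 * SNaN inputs. The comiss/comisd pair below is identical except that it
 * also signals \#I for QNaN operands. */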
5006
5007
5008/* Opcode 0xf3 0x0f 0x2e - invalid */
5009/* Opcode 0xf2 0x0f 0x2e - invalid */
5010
5011
5012/** Opcode 0x0f 0x2f - comiss Vss, Wss */
5013FNIEMOP_DEF(iemOp_comiss_Vss_Wss)
5014{
5015 IEMOP_MNEMONIC2(RM, COMISS, comiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
5016 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5017 if (IEM_IS_MODRM_REG_MODE(bRm))
5018 {
5019 /*
5020 * Register, register.
5021 */
5022 IEM_MC_BEGIN(4, 1, 0, 0);
5023 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
5024 IEM_MC_LOCAL(uint32_t, fEFlags);
5025 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5026 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5027 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5028 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
5029 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5030 IEM_MC_PREPARE_SSE_USAGE();
5031 IEM_MC_FETCH_EFLAGS(fEFlags);
5032 IEM_MC_REF_MXCSR(pfMxcsr);
5033 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5034 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
5035 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5036 IEM_MC_IF_MXCSR_XCPT_PENDING() {
5037 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5038 } IEM_MC_ELSE() {
5039 IEM_MC_COMMIT_EFLAGS(fEFlags);
5040 } IEM_MC_ENDIF();
5041
5042 IEM_MC_ADVANCE_RIP_AND_FINISH();
5043 IEM_MC_END();
5044 }
5045 else
5046 {
5047 /*
5048 * Register, memory.
5049 */
5050 IEM_MC_BEGIN(4, 3, 0, 0);
5051 IEM_MC_LOCAL(uint32_t, fEFlags);
5052 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5053 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5054 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5055 IEM_MC_LOCAL(X86XMMREG, uSrc2);
5056 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
5057 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5058
5059 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5060 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
5061 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5062 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5063
5064 IEM_MC_PREPARE_SSE_USAGE();
5065 IEM_MC_FETCH_EFLAGS(fEFlags);
5066 IEM_MC_REF_MXCSR(pfMxcsr);
5067 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5068 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5069 IEM_MC_IF_MXCSR_XCPT_PENDING() {
5070 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5071 } IEM_MC_ELSE() {
5072 IEM_MC_COMMIT_EFLAGS(fEFlags);
5073 } IEM_MC_ENDIF();
5074
5075 IEM_MC_ADVANCE_RIP_AND_FINISH();
5076 IEM_MC_END();
5077 }
5078}
5079
5080
5081/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
5082FNIEMOP_DEF(iemOp_comisd_Vsd_Wsd)
5083{
5084 IEMOP_MNEMONIC2(RM, COMISD, comisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
5085 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5086 if (IEM_IS_MODRM_REG_MODE(bRm))
5087 {
5088 /*
5089 * Register, register.
5090 */
5091 IEM_MC_BEGIN(4, 1, 0, 0);
5092 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5093 IEM_MC_LOCAL(uint32_t, fEFlags);
5094 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5095 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5096 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5097 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
5098 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5099 IEM_MC_PREPARE_SSE_USAGE();
5100 IEM_MC_FETCH_EFLAGS(fEFlags);
5101 IEM_MC_REF_MXCSR(pfMxcsr);
5102 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5103 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
5104 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5105 IEM_MC_IF_MXCSR_XCPT_PENDING() {
5106 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5107 } IEM_MC_ELSE() {
5108 IEM_MC_COMMIT_EFLAGS(fEFlags);
5109 } IEM_MC_ENDIF();
5110
5111 IEM_MC_ADVANCE_RIP_AND_FINISH();
5112 IEM_MC_END();
5113 }
5114 else
5115 {
5116 /*
5117 * Register, memory.
5118 */
5119 IEM_MC_BEGIN(4, 3, 0, 0);
5120 IEM_MC_LOCAL(uint32_t, fEFlags);
5121 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5122 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5123 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5124 IEM_MC_LOCAL(X86XMMREG, uSrc2);
5125 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
5126 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5127
5128 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5129 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5130 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5131 IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5132
5133 IEM_MC_PREPARE_SSE_USAGE();
5134 IEM_MC_FETCH_EFLAGS(fEFlags);
5135 IEM_MC_REF_MXCSR(pfMxcsr);
5136 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5137 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5138 IEM_MC_IF_MXCSR_XCPT_PENDING() {
5139 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5140 } IEM_MC_ELSE() {
5141 IEM_MC_COMMIT_EFLAGS(fEFlags);
5142 } IEM_MC_ENDIF();
5143
5144 IEM_MC_ADVANCE_RIP_AND_FINISH();
5145 IEM_MC_END();
5146 }
5147}
5148
5149
5150/* Opcode 0xf3 0x0f 0x2f - invalid */
5151/* Opcode 0xf2 0x0f 0x2f - invalid */
5152
5153/** Opcode 0x0f 0x30. */
5154FNIEMOP_DEF(iemOp_wrmsr)
5155{
5156 IEMOP_MNEMONIC(wrmsr, "wrmsr");
5157 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5158 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_wrmsr);
5159}
5160
5161
5162/** Opcode 0x0f 0x31. */
5163FNIEMOP_DEF(iemOp_rdtsc)
5164{
5165 IEMOP_MNEMONIC(rdtsc, "rdtsc");
5166 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5167 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
5168 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
5169 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
5170 iemCImpl_rdtsc);
5171}
5172
5173
5174/** Opcode 0x0f 0x32. */
5175FNIEMOP_DEF(iemOp_rdmsr)
5176{
5177 IEMOP_MNEMONIC(rdmsr, "rdmsr");
5178 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5179 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
5180 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
5181 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
5182 iemCImpl_rdmsr);
5183}
5184
5185
5186/** Opcode 0x0f 0x33. */
5187FNIEMOP_DEF(iemOp_rdpmc)
5188{
5189 IEMOP_MNEMONIC(rdpmc, "rdpmc");
5190 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5191 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
5192 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
5193 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
5194 iemCImpl_rdpmc);
5195}
5196
5197
5198/** Opcode 0x0f 0x34. */
5199FNIEMOP_DEF(iemOp_sysenter)
5200{
5201 IEMOP_MNEMONIC0(FIXED, SYSENTER, sysenter, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
5202 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5203 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
5204 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
5205 iemCImpl_sysenter);
5206}
5207
5208/** Opcode 0x0f 0x35. */
5209FNIEMOP_DEF(iemOp_sysexit)
5210{
5211 IEMOP_MNEMONIC0(FIXED, SYSEXIT, sysexit, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
5212 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5213 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
5214 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
5215 iemCImpl_sysexit, pVCpu->iem.s.enmEffOpSize);
5216}
5217
5218/** Opcode 0x0f 0x37. */
5219FNIEMOP_STUB(iemOp_getsec);
5220
5221
5222/** Opcode 0x0f 0x38. */
5223FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
5224{
5225#ifdef IEM_WITH_THREE_0F_38
5226 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
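        /* The three-byte tables keep four entries per opcode byte, one for
           each SIMD prefix (none, 0x66, 0xf3, 0xf2), selected by idxPrefix. */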
5227 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
5228#else
5229 IEMOP_BITCH_ABOUT_STUB();
5230 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5231#endif
5232}
5233
5234
5235/** Opcode 0x0f 0x3a. */
5236FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
5237{
5238#ifdef IEM_WITH_THREE_0F_3A
5239 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
5240 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
5241#else
5242 IEMOP_BITCH_ABOUT_STUB();
5243 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5244#endif
5245}
5246
5247
5248/**
5249 * Implements a conditional move.
5250 *
5251 * Wish there was an obvious way to do this where we could share and reduce
5252 * code bloat.
5253 *
5254 * @param a_Cnd The conditional "microcode" operation.
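     *
     * @note The 32-bit variants clear the high half of the 64-bit destination
     *       even when the condition is false (see the IEM_MC_ELSE() branches
     *       below), matching the architectural zero-extension of 32-bit writes.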
5255 */
5256#define CMOV_X(a_Cnd) \
5257 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
5258 if (IEM_IS_MODRM_REG_MODE(bRm)) \
5259 { \
5260 switch (pVCpu->iem.s.enmEffOpSize) \
5261 { \
5262 case IEMMODE_16BIT: \
5263 IEM_MC_BEGIN(0, 1, 0, 0); \
5264 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5265 IEM_MC_LOCAL(uint16_t, u16Tmp); \
5266 a_Cnd { \
5267 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5268 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
5269 } IEM_MC_ENDIF(); \
5270 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5271 IEM_MC_END(); \
5272 break; \
5273 \
5274 case IEMMODE_32BIT: \
5275 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0); \
5276 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5277 IEM_MC_LOCAL(uint32_t, u32Tmp); \
5278 a_Cnd { \
5279 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5280 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
5281 } IEM_MC_ELSE() { \
5282 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
5283 } IEM_MC_ENDIF(); \
5284 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5285 IEM_MC_END(); \
5286 break; \
5287 \
5288 case IEMMODE_64BIT: \
5289 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0); \
5290 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5291 IEM_MC_LOCAL(uint64_t, u64Tmp); \
5292 a_Cnd { \
5293 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5294 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
5295 } IEM_MC_ENDIF(); \
5296 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5297 IEM_MC_END(); \
5298 break; \
5299 \
5300 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5301 } \
5302 } \
5303 else \
5304 { \
5305 switch (pVCpu->iem.s.enmEffOpSize) \
5306 { \
5307 case IEMMODE_16BIT: \
5308 IEM_MC_BEGIN(0, 2, 0, 0); \
5309 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5310 IEM_MC_LOCAL(uint16_t, u16Tmp); \
5311 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5312 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5313 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5314 a_Cnd { \
5315 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
5316 } IEM_MC_ENDIF(); \
5317 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5318 IEM_MC_END(); \
5319 break; \
5320 \
5321 case IEMMODE_32BIT: \
5322 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0); \
5323 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5324 IEM_MC_LOCAL(uint32_t, u32Tmp); \
5325 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5326 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5327 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5328 a_Cnd { \
5329 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
5330 } IEM_MC_ELSE() { \
5331 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
5332 } IEM_MC_ENDIF(); \
5333 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5334 IEM_MC_END(); \
5335 break; \
5336 \
5337 case IEMMODE_64BIT: \
5338 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0); \
5339 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5340 IEM_MC_LOCAL(uint64_t, u64Tmp); \
5341 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5342 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5343 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5344 a_Cnd { \
5345 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
5346 } IEM_MC_ENDIF(); \
5347 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5348 IEM_MC_END(); \
5349 break; \
5350 \
5351 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5352 } \
5353 } do {} while (0)
5354
5355
5356
5357/** Opcode 0x0f 0x40. */
5358FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
5359{
5360 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
5361 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
5362}
5363
5364
5365/** Opcode 0x0f 0x41. */
5366FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
5367{
5368 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
5369 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
5370}
5371
5372
5373/** Opcode 0x0f 0x42. */
5374FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
5375{
5376 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
5377 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
5378}
5379
5380
5381/** Opcode 0x0f 0x43. */
5382FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
5383{
5384 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
5385 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
5386}
5387
5388
5389/** Opcode 0x0f 0x44. */
5390FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
5391{
5392 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
5393 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
5394}
5395
5396
5397/** Opcode 0x0f 0x45. */
5398FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
5399{
5400 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
5401 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
5402}
5403
5404
5405/** Opcode 0x0f 0x46. */
5406FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
5407{
5408 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
5409 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
5410}
5411
5412
5413/** Opcode 0x0f 0x47. */
5414FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
5415{
5416 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
5417 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
5418}
5419
5420
5421/** Opcode 0x0f 0x48. */
5422FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
5423{
5424 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
5425 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
5426}
5427
5428
5429/** Opcode 0x0f 0x49. */
5430FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
5431{
5432 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
5433 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
5434}
5435
5436
5437/** Opcode 0x0f 0x4a. */
5438FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
5439{
5440 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
5441 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
5442}
5443
5444
5445/** Opcode 0x0f 0x4b. */
5446FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
5447{
5448 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
5449 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
5450}
5451
5452
5453/** Opcode 0x0f 0x4c. */
5454FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
5455{
5456 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
5457 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
5458}
5459
5460
5461/** Opcode 0x0f 0x4d. */
5462FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
5463{
5464 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
5465 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
5466}
5467
5468
5469/** Opcode 0x0f 0x4e. */
5470FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
5471{
5472 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
5473 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5474}
5475
5476
5477/** Opcode 0x0f 0x4f. */
5478FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
5479{
5480 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
5481 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5482}
5483
5484#undef CMOV_X
5485
5486/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
5487FNIEMOP_DEF(iemOp_movmskps_Gy_Ups)
5488{
5489 IEMOP_MNEMONIC2(RM_REG, MOVMSKPS, movmskps, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /** @todo */
5490 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5491 if (IEM_IS_MODRM_REG_MODE(bRm))
5492 {
5493 /*
5494 * Register, register.
5495 */
5496 IEM_MC_BEGIN(2, 1, 0, 0);
5497 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
5498 IEM_MC_LOCAL(uint8_t, u8Dst);
5499 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5500 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5501 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5502 IEM_MC_PREPARE_SSE_USAGE();
5503 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5504 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskps_u128, pu8Dst, puSrc);
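            /* Only the low four mask bits (one sign bit per packed single) can
               be set; the 32-bit store zero-extends them into the greg. */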
5505 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5506 IEM_MC_ADVANCE_RIP_AND_FINISH();
5507 IEM_MC_END();
5508 }
5509 /* No memory operand. */
5510 else
5511 IEMOP_RAISE_INVALID_OPCODE_RET();
5512}
5513
5514
5515/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
5516FNIEMOP_DEF(iemOp_movmskpd_Gy_Upd)
5517{
5518 IEMOP_MNEMONIC2(RM_REG, MOVMSKPD, movmskpd, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /** @todo */
5519 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5520 if (IEM_IS_MODRM_REG_MODE(bRm))
5521 {
5522 /*
5523 * Register, register.
5524 */
5525 IEM_MC_BEGIN(2, 1, 0, 0);
5526 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5527 IEM_MC_LOCAL(uint8_t, u8Dst);
5528 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5529 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5530 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5531 IEM_MC_PREPARE_SSE_USAGE();
5532 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5533 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskpd_u128, pu8Dst, puSrc);
5534        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5535 IEM_MC_ADVANCE_RIP_AND_FINISH();
5536 IEM_MC_END();
5537 }
5538 /* No memory operand. */
5539 else
5540 IEMOP_RAISE_INVALID_OPCODE_RET();
5541
5542}
5543
5544
5545/* Opcode 0xf3 0x0f 0x50 - invalid */
5546/* Opcode 0xf2 0x0f 0x50 - invalid */
5547
5548
5549/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
5550FNIEMOP_DEF(iemOp_sqrtps_Vps_Wps)
5551{
5552 IEMOP_MNEMONIC2(RM, SQRTPS, sqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5553 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_sqrtps_u128);
5554}
5555
5556
5557/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
5558FNIEMOP_DEF(iemOp_sqrtpd_Vpd_Wpd)
5559{
5560 IEMOP_MNEMONIC2(RM, SQRTPD, sqrtpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5561 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_sqrtpd_u128);
5562}
5563
5564
5565/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
5566FNIEMOP_DEF(iemOp_sqrtss_Vss_Wss)
5567{
5568 IEMOP_MNEMONIC2(RM, SQRTSS, sqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5569 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_sqrtss_u128_r32);
5570}
5571
5572
5573/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
5574FNIEMOP_DEF(iemOp_sqrtsd_Vsd_Wsd)
5575{
5576 IEMOP_MNEMONIC2(RM, SQRTSD, sqrtsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5577 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_sqrtsd_u128_r64);
5578}
5579
5580
5581/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
5582FNIEMOP_DEF(iemOp_rsqrtps_Vps_Wps)
5583{
5584 IEMOP_MNEMONIC2(RM, RSQRTPS, rsqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5585 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_rsqrtps_u128);
5586}
5587
5588
5589/* Opcode 0x66 0x0f 0x52 - invalid */
5590
5591
5592/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
5593FNIEMOP_DEF(iemOp_rsqrtss_Vss_Wss)
5594{
5595 IEMOP_MNEMONIC2(RM, RSQRTSS, rsqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5596 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_rsqrtss_u128_r32);
5597}
5598
5599
5600/* Opcode 0xf2 0x0f 0x52 - invalid */
5601
5602
5603/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
5604FNIEMOP_DEF(iemOp_rcpps_Vps_Wps)
5605{
5606 IEMOP_MNEMONIC2(RM, RCPPS, rcpps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5607 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_rcpps_u128);
5608}
5609
5610
5611/* Opcode 0x66 0x0f 0x53 - invalid */
5612
5613
5614/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
5615FNIEMOP_DEF(iemOp_rcpss_Vss_Wss)
5616{
5617 IEMOP_MNEMONIC2(RM, RCPSS, rcpss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5618 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_rcpss_u128_r32);
5619}
5620
5621
5622/* Opcode 0xf2 0x0f 0x53 - invalid */
5623
5624
5625/** Opcode 0x0f 0x54 - andps Vps, Wps */
5626FNIEMOP_DEF(iemOp_andps_Vps_Wps)
5627{
5628 IEMOP_MNEMONIC2(RM, ANDPS, andps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5629 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pand_u128);
5630}
5631
5632
5633/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
5634FNIEMOP_DEF(iemOp_andpd_Vpd_Wpd)
5635{
5636 IEMOP_MNEMONIC2(RM, ANDPD, andpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5637 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
5638}
5639
5640
5641/* Opcode 0xf3 0x0f 0x54 - invalid */
5642/* Opcode 0xf2 0x0f 0x54 - invalid */
5643
5644
5645/** Opcode 0x0f 0x55 - andnps Vps, Wps */
5646FNIEMOP_DEF(iemOp_andnps_Vps_Wps)
5647{
5648 IEMOP_MNEMONIC2(RM, ANDNPS, andnps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5649 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pandn_u128);
5650}
5651
5652
5653/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
5654FNIEMOP_DEF(iemOp_andnpd_Vpd_Wpd)
5655{
5656 IEMOP_MNEMONIC2(RM, ANDNPD, andnpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5657 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
5658}
5659
5660
5661/* Opcode 0xf3 0x0f 0x55 - invalid */
5662/* Opcode 0xf2 0x0f 0x55 - invalid */
5663
5664
5665/** Opcode 0x0f 0x56 - orps Vps, Wps */
5666FNIEMOP_DEF(iemOp_orps_Vps_Wps)
5667{
5668 IEMOP_MNEMONIC2(RM, ORPS, orps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5669 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_por_u128);
5670}
5671
5672
5673/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
5674FNIEMOP_DEF(iemOp_orpd_Vpd_Wpd)
5675{
5676 IEMOP_MNEMONIC2(RM, ORPD, orpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5677 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
5678}
5679
5680
5681/* Opcode 0xf3 0x0f 0x56 - invalid */
5682/* Opcode 0xf2 0x0f 0x56 - invalid */
5683
5684
5685/** Opcode 0x0f 0x57 - xorps Vps, Wps */
5686FNIEMOP_DEF(iemOp_xorps_Vps_Wps)
5687{
5688 IEMOP_MNEMONIC2(RM, XORPS, xorps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5689 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pxor_u128);
5690}
5691
5692
5693/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
5694FNIEMOP_DEF(iemOp_xorpd_Vpd_Wpd)
5695{
5696 IEMOP_MNEMONIC2(RM, XORPD, xorpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5697 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
5698}
5699
5700
5701/* Opcode 0xf3 0x0f 0x57 - invalid */
5702/* Opcode 0xf2 0x0f 0x57 - invalid */
5703
5704/** Opcode 0x0f 0x58 - addps Vps, Wps */
5705FNIEMOP_DEF(iemOp_addps_Vps_Wps)
5706{
5707 IEMOP_MNEMONIC2(RM, ADDPS, addps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5708 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
5709}
5710
5711
5712/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
5713FNIEMOP_DEF(iemOp_addpd_Vpd_Wpd)
5714{
5715 IEMOP_MNEMONIC2(RM, ADDPD, addpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5716 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_addpd_u128);
5717}
5718
5719
5720/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
5721FNIEMOP_DEF(iemOp_addss_Vss_Wss)
5722{
5723 IEMOP_MNEMONIC2(RM, ADDSS, addss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5724 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_addss_u128_r32);
5725}
5726
5727
5728/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
5729FNIEMOP_DEF(iemOp_addsd_Vsd_Wsd)
5730{
5731 IEMOP_MNEMONIC2(RM, ADDSD, addsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5732 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_addsd_u128_r64);
5733}
5734
5735
5736/** Opcode 0x0f 0x59 - mulps Vps, Wps */
5737FNIEMOP_DEF(iemOp_mulps_Vps_Wps)
5738{
5739 IEMOP_MNEMONIC2(RM, MULPS, mulps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5740 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_mulps_u128);
5741}
5742
5743
5744/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
5745FNIEMOP_DEF(iemOp_mulpd_Vpd_Wpd)
5746{
5747 IEMOP_MNEMONIC2(RM, MULPD, mulpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5748 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_mulpd_u128);
5749}
5750
5751
5752/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
5753FNIEMOP_DEF(iemOp_mulss_Vss_Wss)
5754{
5755 IEMOP_MNEMONIC2(RM, MULSS, mulss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5756 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_mulss_u128_r32);
5757}
5758
5759
5760/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
5761FNIEMOP_DEF(iemOp_mulsd_Vsd_Wsd)
5762{
5763 IEMOP_MNEMONIC2(RM, MULSD, mulsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5764 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_mulsd_u128_r64);
5765}
5766
5767
5768/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
5769FNIEMOP_DEF(iemOp_cvtps2pd_Vpd_Wps)
5770{
5771 IEMOP_MNEMONIC2(RM, CVTPS2PD, cvtps2pd, Vpd, Wps, DISOPTYPE_HARMLESS, 0);
5772 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2pd_u128);
5773}
5774
5775
5776/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
5777FNIEMOP_DEF(iemOp_cvtpd2ps_Vps_Wpd)
5778{
5779 IEMOP_MNEMONIC2(RM, CVTPD2PS, cvtpd2ps, Vps, Wpd, DISOPTYPE_HARMLESS, 0);
5780 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2ps_u128);
5781}
5782
5783
5784/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
5785FNIEMOP_DEF(iemOp_cvtss2sd_Vsd_Wss)
5786{
5787 IEMOP_MNEMONIC2(RM, CVTSS2SD, cvtss2sd, Vsd, Wss, DISOPTYPE_HARMLESS, 0);
5788 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_cvtss2sd_u128_r32);
5789}
5790
5791
5792/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
5793FNIEMOP_DEF(iemOp_cvtsd2ss_Vss_Wsd)
5794{
5795 IEMOP_MNEMONIC2(RM, CVTSD2SS, cvtsd2ss, Vss, Wsd, DISOPTYPE_HARMLESS, 0);
5796 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_cvtsd2ss_u128_r64);
5797}
5798
5799
5800/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
5801FNIEMOP_DEF(iemOp_cvtdq2ps_Vps_Wdq)
5802{
5803 IEMOP_MNEMONIC2(RM, CVTDQ2PS, cvtdq2ps, Vps, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5804 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2ps_u128);
5805}
5806
5807
5808/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
5809FNIEMOP_DEF(iemOp_cvtps2dq_Vdq_Wps)
5810{
5811 IEMOP_MNEMONIC2(RM, CVTPS2DQ, cvtps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5812 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2dq_u128);
5813}
5814
5815
5816/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
5817FNIEMOP_DEF(iemOp_cvttps2dq_Vdq_Wps)
5818{
5819 IEMOP_MNEMONIC2(RM, CVTTPS2DQ, cvttps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5820 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttps2dq_u128);
5821}
5822
5823
5824/* Opcode 0xf2 0x0f 0x5b - invalid */
5825
5826
5827/** Opcode 0x0f 0x5c - subps Vps, Wps */
5828FNIEMOP_DEF(iemOp_subps_Vps_Wps)
5829{
5830 IEMOP_MNEMONIC2(RM, SUBPS, subps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5831 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_subps_u128);
5832}
5833
5834
5835/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
5836FNIEMOP_DEF(iemOp_subpd_Vpd_Wpd)
5837{
5838 IEMOP_MNEMONIC2(RM, SUBPD, subpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5839 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_subpd_u128);
5840}
5841
5842
5843/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
5844FNIEMOP_DEF(iemOp_subss_Vss_Wss)
5845{
5846 IEMOP_MNEMONIC2(RM, SUBSS, subss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5847 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_subss_u128_r32);
5848}
5849
5850
5851/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
5852FNIEMOP_DEF(iemOp_subsd_Vsd_Wsd)
5853{
5854 IEMOP_MNEMONIC2(RM, SUBSD, subsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5855 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_subsd_u128_r64);
5856}
5857
5858
5859/** Opcode 0x0f 0x5d - minps Vps, Wps */
5860FNIEMOP_DEF(iemOp_minps_Vps_Wps)
5861{
5862 IEMOP_MNEMONIC2(RM, MINPS, minps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5863 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_minps_u128);
5864}
5865
5866
5867/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
5868FNIEMOP_DEF(iemOp_minpd_Vpd_Wpd)
5869{
5870 IEMOP_MNEMONIC2(RM, MINPD, minpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5871 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_minpd_u128);
5872}
5873
5874
5875/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
5876FNIEMOP_DEF(iemOp_minss_Vss_Wss)
5877{
5878 IEMOP_MNEMONIC2(RM, MINSS, minss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5879 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_minss_u128_r32);
5880}
5881
5882
5883/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
5884FNIEMOP_DEF(iemOp_minsd_Vsd_Wsd)
5885{
5886 IEMOP_MNEMONIC2(RM, MINSD, minsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5887 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_minsd_u128_r64);
5888}
5889
5890
5891/** Opcode 0x0f 0x5e - divps Vps, Wps */
5892FNIEMOP_DEF(iemOp_divps_Vps_Wps)
5893{
5894 IEMOP_MNEMONIC2(RM, DIVPS, divps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5895 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_divps_u128);
5896}
5897
5898
5899/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
5900FNIEMOP_DEF(iemOp_divpd_Vpd_Wpd)
5901{
5902 IEMOP_MNEMONIC2(RM, DIVPD, divpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5903 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_divpd_u128);
5904}
5905
5906
5907/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
5908FNIEMOP_DEF(iemOp_divss_Vss_Wss)
5909{
5910 IEMOP_MNEMONIC2(RM, DIVSS, divss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5911 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_divss_u128_r32);
5912}
5913
5914
5915/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
5916FNIEMOP_DEF(iemOp_divsd_Vsd_Wsd)
5917{
5918 IEMOP_MNEMONIC2(RM, DIVSD, divsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5919 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_divsd_u128_r64);
5920}
5921
5922
5923/** Opcode 0x0f 0x5f - maxps Vps, Wps */
5924FNIEMOP_DEF(iemOp_maxps_Vps_Wps)
5925{
5926 IEMOP_MNEMONIC2(RM, MAXPS, maxps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5927 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_maxps_u128);
5928}
5929
5930
5931/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
5932FNIEMOP_DEF(iemOp_maxpd_Vpd_Wpd)
5933{
5934 IEMOP_MNEMONIC2(RM, MAXPD, maxpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5935 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_maxpd_u128);
5936}
5937
5938
5939/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
5940FNIEMOP_DEF(iemOp_maxss_Vss_Wss)
5941{
5942 IEMOP_MNEMONIC2(RM, MAXSS, maxss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5943 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_maxss_u128_r32);
5944}
5945
5946
5947/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
5948FNIEMOP_DEF(iemOp_maxsd_Vsd_Wsd)
5949{
5950 IEMOP_MNEMONIC2(RM, MAXSD, maxsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5951 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_maxsd_u128_r64);
5952}
5953
5954
5955/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
5956FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
5957{
5958 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5959 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
5960}
5961
5962
5963/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
5964FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
5965{
5966 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5967 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklbw_u128);
5968}
5969
5970
5971/* Opcode 0xf3 0x0f 0x60 - invalid */
5972
5973
5974/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
5975FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
5976{
5977    /** @todo AMD marks the MMX version as 3DNow!. Intel says MMX CPUID req. */
5978 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5979 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklwd_u64);
5980}
5981
5982
5983/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
5984FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
5985{
5986 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5987 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklwd_u128);
5988}
5989
5990
5991/* Opcode 0xf3 0x0f 0x61 - invalid */
5992
5993
5994/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
5995FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
5996{
5997 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5998 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpckldq_u64);
5999}
6000
6001
6002/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
6003FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
6004{
6005 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6006 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpckldq_u128);
6007}
6008
6009
6010/* Opcode 0xf3 0x0f 0x62 - invalid */
6011
6012
6013
6014/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
6015FNIEMOP_DEF(iemOp_packsswb_Pq_Qq)
6016{
6017 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6018 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packsswb_u64);
6019}
6020
6021
6022/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
6023FNIEMOP_DEF(iemOp_packsswb_Vx_Wx)
6024{
6025 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6026 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packsswb_u128);
6027}
6028
6029
6030/* Opcode 0xf3 0x0f 0x63 - invalid */
6031
6032
6033/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
6034FNIEMOP_DEF(iemOp_pcmpgtb_Pq_Qq)
6035{
6036 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6037 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtb_u64);
6038}
6039
6040
6041/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
6042FNIEMOP_DEF(iemOp_pcmpgtb_Vx_Wx)
6043{
6044 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6045 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtb_u128);
6046}
6047
6048
6049/* Opcode 0xf3 0x0f 0x64 - invalid */
6050
6051
6052/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
6053FNIEMOP_DEF(iemOp_pcmpgtw_Pq_Qq)
6054{
6055 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6056 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtw_u64);
6057}
6058
6059
6060/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
6061FNIEMOP_DEF(iemOp_pcmpgtw_Vx_Wx)
6062{
6063 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6064 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtw_u128);
6065}
6066
6067
6068/* Opcode 0xf3 0x0f 0x65 - invalid */
6069
6070
6071/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
6072FNIEMOP_DEF(iemOp_pcmpgtd_Pq_Qq)
6073{
6074 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6075 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtd_u64);
6076}
6077
6078
6079/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
6080FNIEMOP_DEF(iemOp_pcmpgtd_Vx_Wx)
6081{
6082 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6083 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtd_u128);
6084}
6085
6086
6087/* Opcode 0xf3 0x0f 0x66 - invalid */
6088
6089
6090/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
6091FNIEMOP_DEF(iemOp_packuswb_Pq_Qq)
6092{
6093 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6094 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packuswb_u64);
6095}
6096
6097
6098/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
6099FNIEMOP_DEF(iemOp_packuswb_Vx_Wx)
6100{
6101 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6102 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packuswb_u128);
6103}
6104
6105
6106/* Opcode 0xf3 0x0f 0x67 - invalid */
6107
6108
6109/** Opcode 0x0f 0x68 - punpckhbw Pq, Qq
6110 * @note Intel and AMD both use Qd for the second parameter; however, they
6111 *       both list it as an mmX/mem64 operand and Intel describes it as being
6112 * loaded as a qword, so it should be Qq, shouldn't it? */
6113FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq)
6114{
6115 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6116 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
6117}
6118
6119
6120/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
6121FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
6122{
6123 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6124 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhbw_u128);
6125}
6126
6127
6128/* Opcode 0xf3 0x0f 0x68 - invalid */
6129
6130
6131/** Opcode 0x0f 0x69 - punpckhwd Pq, Qq
6132 * @note Intel and AMD both use Qd for the second parameter; however, they
6133 *       both list it as an mmX/mem64 operand and Intel describes it as being
6134 * loaded as a qword, so it should be Qq, shouldn't it? */
6135FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qq)
6136{
6137 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6138 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhwd_u64);
6139}
6140
6141
6142/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
6143FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
6144{
6145 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6146 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhwd_u128);
6147
6148}
6149
6150
6151/* Opcode 0xf3 0x0f 0x69 - invalid */
6152
6153
6154/** Opcode 0x0f 0x6a - punpckhdq Pq, Qq
6155 * @note Intel and AMD both use Qd for the second parameter; however, they
6156 *       both list it as an mmX/mem64 operand and Intel describes it as being
6157 * loaded as a qword, so it should be Qq, shouldn't it? */
6158FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qq)
6159{
6160 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6161 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhdq_u64);
6162}
6163
6164
6165/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
6166FNIEMOP_DEF(iemOp_punpckhdq_Vx_Wx)
6167{
6168 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6169 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhdq_u128);
6170}
6171
6172
6173/* Opcode 0xf3 0x0f 0x6a - invalid */
6174
6175
6176/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
6177FNIEMOP_DEF(iemOp_packssdw_Pq_Qd)
6178{
6179 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6180 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packssdw_u64);
6181}
6182
6183
6184/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
6185FNIEMOP_DEF(iemOp_packssdw_Vx_Wx)
6186{
6187 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6188 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packssdw_u128);
6189}
6190
6191
6192/* Opcode 0xf3 0x0f 0x6b - invalid */
6193
6194
6195/* Opcode 0x0f 0x6c - invalid */
6196
6197
6198/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
6199FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
6200{
6201 IEMOP_MNEMONIC2(RM, PUNPCKLQDQ, punpcklqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6202 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklqdq_u128);
6203}
6204
6205
6206/* Opcode 0xf3 0x0f 0x6c - invalid */
6207/* Opcode 0xf2 0x0f 0x6c - invalid */
6208
6209
6210/* Opcode 0x0f 0x6d - invalid */
6211
6212
6213/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
6214FNIEMOP_DEF(iemOp_punpckhqdq_Vx_Wx)
6215{
6216 IEMOP_MNEMONIC2(RM, PUNPCKHQDQ, punpckhqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6217 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhqdq_u128);
6218}
6219
6220
6221/* Opcode 0xf3 0x0f 0x6d - invalid */
6222
6223
6224FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
6225{
6226 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6227 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6228 {
6229 /**
6230 * @opcode 0x6e
6231 * @opcodesub rex.w=1
6232 * @oppfx none
6233 * @opcpuid mmx
6234 * @opgroup og_mmx_datamove
6235 * @opxcpttype 5
6236 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
6237 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
6238 */
6239 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
6240 if (IEM_IS_MODRM_REG_MODE(bRm))
6241 {
6242 /* MMX, greg64 */
6243 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
6244 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6245 IEM_MC_LOCAL(uint64_t, u64Tmp);
6246
6247 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6248 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6249 IEM_MC_FPU_TO_MMX_MODE();
6250
6251 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6252 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6253
6254 IEM_MC_ADVANCE_RIP_AND_FINISH();
6255 IEM_MC_END();
6256 }
6257 else
6258 {
6259 /* MMX, [mem64] */
6260 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
6261 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6262 IEM_MC_LOCAL(uint64_t, u64Tmp);
6263
6264 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6265 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6266 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6267 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6268 IEM_MC_FPU_TO_MMX_MODE();
6269
6270 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6271 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6272
6273 IEM_MC_ADVANCE_RIP_AND_FINISH();
6274 IEM_MC_END();
6275 }
6276 }
6277 else
6278 {
6279 /**
6280 * @opdone
6281 * @opcode 0x6e
6282 * @opcodesub rex.w=0
6283 * @oppfx none
6284 * @opcpuid mmx
6285 * @opgroup og_mmx_datamove
6286 * @opxcpttype 5
6287 * @opfunction iemOp_movd_q_Pd_Ey
6288 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6289 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6290 */
6291 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
6292 if (IEM_IS_MODRM_REG_MODE(bRm))
6293 {
6294 /* MMX, greg32 */
6295 IEM_MC_BEGIN(0, 1, 0, 0);
6296 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6297 IEM_MC_LOCAL(uint32_t, u32Tmp);
6298
6299 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6300 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6301 IEM_MC_FPU_TO_MMX_MODE();
6302
6303 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6304 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
6305
6306 IEM_MC_ADVANCE_RIP_AND_FINISH();
6307 IEM_MC_END();
6308 }
6309 else
6310 {
6311 /* MMX, [mem32] */
6312 IEM_MC_BEGIN(0, 2, 0, 0);
6313 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6314 IEM_MC_LOCAL(uint32_t, u32Tmp);
6315
6316 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6317 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6318 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6319 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6320 IEM_MC_FPU_TO_MMX_MODE();
6321
6322 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6323 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
6324
6325 IEM_MC_ADVANCE_RIP_AND_FINISH();
6326 IEM_MC_END();
6327 }
6328 }
6329}
6330
6331FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
6332{
6333 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6334 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6335 {
6336 /**
6337 * @opcode 0x6e
6338 * @opcodesub rex.w=1
6339 * @oppfx 0x66
6340 * @opcpuid sse2
6341 * @opgroup og_sse2_simdint_datamove
6342 * @opxcpttype 5
6343 * @optest 64-bit / op1=1 op2=2 -> op1=2
6344 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
6345 */
6346 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
6347 if (IEM_IS_MODRM_REG_MODE(bRm))
6348 {
6349 /* XMM, greg64 */
6350 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
6351 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6352 IEM_MC_LOCAL(uint64_t, u64Tmp);
6353
6354 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6355 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6356
6357 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6358 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
6359
6360 IEM_MC_ADVANCE_RIP_AND_FINISH();
6361 IEM_MC_END();
6362 }
6363 else
6364 {
6365 /* XMM, [mem64] */
6366 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
6367 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6368 IEM_MC_LOCAL(uint64_t, u64Tmp);
6369
6370 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6371 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6372 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6373 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6374
6375 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6376 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
6377
6378 IEM_MC_ADVANCE_RIP_AND_FINISH();
6379 IEM_MC_END();
6380 }
6381 }
6382 else
6383 {
6384 /**
6385 * @opdone
6386 * @opcode 0x6e
6387 * @opcodesub rex.w=0
6388 * @oppfx 0x66
6389 * @opcpuid sse2
6390 * @opgroup og_sse2_simdint_datamove
6391 * @opxcpttype 5
6392 * @opfunction iemOp_movd_q_Vy_Ey
6393 * @optest op1=1 op2=2 -> op1=2
6394 * @optest op1=0 op2=-42 -> op1=-42
6395 */
6396 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
6397 if (IEM_IS_MODRM_REG_MODE(bRm))
6398 {
6399 /* XMM, greg32 */
6400 IEM_MC_BEGIN(0, 1, 0, 0);
6401 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6402 IEM_MC_LOCAL(uint32_t, u32Tmp);
6403
6404 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6405 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6406
6407 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6408 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
6409
6410 IEM_MC_ADVANCE_RIP_AND_FINISH();
6411 IEM_MC_END();
6412 }
6413 else
6414 {
6415 /* XMM, [mem32] */
6416 IEM_MC_BEGIN(0, 2, 0, 0);
6417 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6418 IEM_MC_LOCAL(uint32_t, u32Tmp);
6419
6420 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6421 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6422 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6423 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6424
6425 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6426 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
6427
6428 IEM_MC_ADVANCE_RIP_AND_FINISH();
6429 IEM_MC_END();
6430 }
6431 }
6432}
6433
6434/* Opcode 0xf3 0x0f 0x6e - invalid */
6435
6436
6437/**
6438 * @opcode 0x6f
6439 * @oppfx none
6440 * @opcpuid mmx
6441 * @opgroup og_mmx_datamove
6442 * @opxcpttype 5
6443 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6444 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6445 */
6446FNIEMOP_DEF(iemOp_movq_Pq_Qq)
6447{
6448    IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
6449 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6450 if (IEM_IS_MODRM_REG_MODE(bRm))
6451 {
6452 /*
6453 * Register, register.
6454 */
6455 IEM_MC_BEGIN(0, 1, 0, 0);
6456 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6457 IEM_MC_LOCAL(uint64_t, u64Tmp);
6458
6459 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6460 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
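            /* Entering MMX mode clears TOS and sets the FPU tag word to all
               valid, hence the ftw=0xff in the @optest annotations above. */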
6461 IEM_MC_FPU_TO_MMX_MODE();
6462
6463 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_RM_8(bRm));
6464 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6465
6466 IEM_MC_ADVANCE_RIP_AND_FINISH();
6467 IEM_MC_END();
6468 }
6469 else
6470 {
6471 /*
6472 * Register, memory.
6473 */
6474 IEM_MC_BEGIN(0, 2, 0, 0);
6475 IEM_MC_LOCAL(uint64_t, u64Tmp);
6476 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6477
6478 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6479 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6480 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6481 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6482 IEM_MC_FPU_TO_MMX_MODE();
6483
6484 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6485 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6486
6487 IEM_MC_ADVANCE_RIP_AND_FINISH();
6488 IEM_MC_END();
6489 }
6490}
6491
6492/**
6493 * @opcode 0x6f
6494 * @oppfx 0x66
6495 * @opcpuid sse2
6496 * @opgroup og_sse2_simdint_datamove
6497 * @opxcpttype 1
6498 * @optest op1=1 op2=2 -> op1=2
6499 * @optest op1=0 op2=-42 -> op1=-42
6500 */
6501FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
6502{
6503 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6504 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6505 if (IEM_IS_MODRM_REG_MODE(bRm))
6506 {
6507 /*
6508 * Register, register.
6509 */
6510 IEM_MC_BEGIN(0, 0, 0, 0);
6511 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6512
6513 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6514 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6515
6516 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6517 IEM_GET_MODRM_RM(pVCpu, bRm));
6518 IEM_MC_ADVANCE_RIP_AND_FINISH();
6519 IEM_MC_END();
6520 }
6521 else
6522 {
6523 /*
6524 * Register, memory.
6525 */
6526 IEM_MC_BEGIN(0, 2, 0, 0);
6527 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6528 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6529
6530 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6531 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6532 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6533 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6534
6535 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6536 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6537
6538 IEM_MC_ADVANCE_RIP_AND_FINISH();
6539 IEM_MC_END();
6540 }
6541}
6542
6543/**
6544 * @opcode 0x6f
6545 * @oppfx 0xf3
6546 * @opcpuid sse2
6547 * @opgroup og_sse2_simdint_datamove
6548 * @opxcpttype 4UA
6549 * @optest op1=1 op2=2 -> op1=2
6550 * @optest op1=0 op2=-42 -> op1=-42
6551 */
6552FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
6553{
6554 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6555 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6556 if (IEM_IS_MODRM_REG_MODE(bRm))
6557 {
6558 /*
6559 * Register, register.
6560 */
6561 IEM_MC_BEGIN(0, 0, 0, 0);
6562 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6563 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6564 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6565 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6566 IEM_GET_MODRM_RM(pVCpu, bRm));
6567 IEM_MC_ADVANCE_RIP_AND_FINISH();
6568 IEM_MC_END();
6569 }
6570 else
6571 {
6572 /*
6573 * Register, memory.
6574 */
6575 IEM_MC_BEGIN(0, 2, 0, 0);
6576 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6577 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6578
6579 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6580 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6581 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6582 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
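            /* In contrast to movdqa, the fetch is not alignment checked, as
               movdqu tolerates misaligned operands (exception type 4UA). */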
6583 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6584 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6585
6586 IEM_MC_ADVANCE_RIP_AND_FINISH();
6587 IEM_MC_END();
6588 }
6589}
6590
6591
6592/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
6593FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
6594{
6595 IEMOP_MNEMONIC3(RMI, PSHUFW, pshufw, Pq, Qq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6596 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6597 if (IEM_IS_MODRM_REG_MODE(bRm))
6598 {
6599 /*
6600 * Register, register.
6601 */
6602 IEM_MC_BEGIN(3, 0, 0, 0);
6603 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6604 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
6605 IEM_MC_ARG(uint64_t *, pDst, 0);
6606 IEM_MC_ARG(uint64_t const *, pSrc, 1);
6607 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6608 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6609 IEM_MC_PREPARE_FPU_USAGE();
6610 IEM_MC_FPU_TO_MMX_MODE();
6611
6612 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6613 IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
6614 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
6615 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6616
6617 IEM_MC_ADVANCE_RIP_AND_FINISH();
6618 IEM_MC_END();
6619 }
6620 else
6621 {
6622 /*
6623 * Register, memory.
6624 */
6625 IEM_MC_BEGIN(3, 2, 0, 0);
6626 IEM_MC_ARG(uint64_t *, pDst, 0);
6627 IEM_MC_LOCAL(uint64_t, uSrc);
6628 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
6629 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6630
6631 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
6632 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6633 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6634 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
6635 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6636 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6637
6638 IEM_MC_PREPARE_FPU_USAGE();
6639 IEM_MC_FPU_TO_MMX_MODE();
6640
6641 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6642 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
6643 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6644
6645 IEM_MC_ADVANCE_RIP_AND_FINISH();
6646 IEM_MC_END();
6647 }
6648}
6649
6650
6651/**
6652 * Common worker for SSE2 instructions on the forms:
6653 * pshufd xmm1, xmm2/mem128, imm8
6654 * pshufhw xmm1, xmm2/mem128, imm8
6655 * pshuflw xmm1, xmm2/mem128, imm8
6656 *
6657 * Proper alignment of the 128-bit operand is enforced.
6658 * Exceptions type 4. SSE2 cpuid checks.
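     *
     * For reference: each 2-bit field of the immediate selects one source
     * element, so e.g. pshufd with imm8=0x1b (0b00011011) reverses the dwords.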
6659 */
6660FNIEMOP_DEF_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnWorker)
6661{
6662 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6663 if (IEM_IS_MODRM_REG_MODE(bRm))
6664 {
6665 /*
6666 * Register, register.
6667 */
6668 IEM_MC_BEGIN(3, 0, 0, 0);
6669 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6670 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6671 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6672 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
6673 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6674 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6675 IEM_MC_PREPARE_SSE_USAGE();
6676 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6677 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
6678 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);
6679 IEM_MC_ADVANCE_RIP_AND_FINISH();
6680 IEM_MC_END();
6681 }
6682 else
6683 {
6684 /*
6685 * Register, memory.
6686 */
6687 IEM_MC_BEGIN(3, 2, 0, 0);
6688 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6689 IEM_MC_LOCAL(RTUINT128U, uSrc);
6690 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
6691 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6692
6693 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
6694 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6695 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6696 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6697 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6698
6699 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6700 IEM_MC_PREPARE_SSE_USAGE();
6701 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6702 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);
6703
6704 IEM_MC_ADVANCE_RIP_AND_FINISH();
6705 IEM_MC_END();
6706 }
6707}
6708
6709
6710/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
6711FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
6712{
6713 IEMOP_MNEMONIC3(RMI, PSHUFD, pshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6714 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128);
6715}
6716
6717
6718/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
6719FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
6720{
6721 IEMOP_MNEMONIC3(RMI, PSHUFHW, pshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6722 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128);
6723}
6724
6725
6726/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
6727FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
6728{
6729 IEMOP_MNEMONIC3(RMI, PSHUFLW, pshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6730 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128);
6731}
6732
6733
6734/**
6735 * Common worker for MMX instructions of the form:
6736 * psrlw mm, imm8
6737 * psraw mm, imm8
6738 * psllw mm, imm8
6739 * psrld mm, imm8
6740 * psrad mm, imm8
6741 * pslld mm, imm8
6742 * psrlq mm, imm8
6743 * psllq mm, imm8
6744 *
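     * The immediate is an unsigned shift count applied to every element;
     * counts at or above the element width zero the logical-shift results,
     * while the arithmetic right shifts fill each element with its sign bit.
     *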
6745 */
6746FNIEMOP_DEF_2(iemOpCommonMmx_Shift_Imm, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHIFTU64, pfnU64)
6747{
6748 if (IEM_IS_MODRM_REG_MODE(bRm))
6749 {
6750 /*
6751 * Register, immediate.
6752 */
6753 IEM_MC_BEGIN(2, 0, 0, 0);
6754 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6755 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6756 IEM_MC_ARG(uint64_t *, pDst, 0);
6757 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6758 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6759 IEM_MC_PREPARE_FPU_USAGE();
6760 IEM_MC_FPU_TO_MMX_MODE();
6761
6762 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_RM_8(bRm));
6763 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, bShiftArg);
6764 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6765
6766 IEM_MC_ADVANCE_RIP_AND_FINISH();
6767 IEM_MC_END();
6768 }
6769 else
6770 {
6771 /*
6772 * Register, memory not supported.
6773 */
6774 /// @todo Caller already enforced register mode?!
6775 AssertFailedReturn(VINF_SUCCESS);
6776 }
6777}
6778
6779
6780/**
6781 * Common worker for SSE2 instructions of the form:
6782 * psrlw xmm, imm8
6783 * psraw xmm, imm8
6784 * psllw xmm, imm8
6785 * psrld xmm, imm8
6786 * psrad xmm, imm8
6787 * pslld xmm, imm8
6788 * psrlq xmm, imm8
6789 * psllq xmm, imm8
6790 *
6791 */
6792FNIEMOP_DEF_2(iemOpCommonSse2_Shift_Imm, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHIFTU128, pfnU128)
6793{
6794 if (IEM_IS_MODRM_REG_MODE(bRm))
6795 {
6796 /*
6797 * Register, immediate.
6798 */
6799 IEM_MC_BEGIN(2, 0, 0, 0);
6800 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6801 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6802 IEM_MC_ARG(PRTUINT128U, pDst, 0);
6803 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6804 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6805 IEM_MC_PREPARE_SSE_USAGE();
6806 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_RM(pVCpu, bRm));
6807 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, bShiftArg);
6808 IEM_MC_ADVANCE_RIP_AND_FINISH();
6809 IEM_MC_END();
6810 }
6811 else
6812 {
6813 /*
6814     * Register, memory not supported.
6815 */
6816 /// @todo Caller already enforced register mode?!
6817 AssertFailedReturn(VINF_SUCCESS);
6818 }
6819}
6820
6821
6822/** Opcode 0x0f 0x71 11/2 - psrlw Nq, Ib */
6823FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Nq_Ib)
6824{
6825// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6826 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlw_imm_u64);
6827}
6828
6829
6830/** Opcode 0x66 0x0f 0x71 11/2. */
6831FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Ux_Ib)
6832{
6833// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6834 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlw_imm_u128);
6835}
6836
6837
6838/** Opcode 0x0f 0x71 11/4. */
6839FNIEMOPRM_DEF(iemOp_Grp12_psraw_Nq_Ib)
6840{
6841// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6842 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psraw_imm_u64);
6843}
6844
6845
6846/** Opcode 0x66 0x0f 0x71 11/4. */
6847FNIEMOPRM_DEF(iemOp_Grp12_psraw_Ux_Ib)
6848{
6849// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6850 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psraw_imm_u128);
6851}
6852
6853
6854/** Opcode 0x0f 0x71 11/6. */
6855FNIEMOPRM_DEF(iemOp_Grp12_psllw_Nq_Ib)
6856{
6857// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6858 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllw_imm_u64);
6859}
6860
6861
6862/** Opcode 0x66 0x0f 0x71 11/6. */
6863FNIEMOPRM_DEF(iemOp_Grp12_psllw_Ux_Ib)
6864{
6865// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6866 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllw_imm_u128);
6867}
6868
6869
6870/**
6871 * Group 12 jump table for register variant.
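     *
     * Four columns per /r entry, one per SIMD prefix (none, 0x66, 0xf3, 0xf2),
     * matching the reg * 4 + idxPrefix lookup in iemOp_Grp12 below.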
6872 */
6873IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
6874{
6875 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6876 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6877 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6878 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6879 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6880 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6881 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6882 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6883};
6884AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
6885
6886
6887/** Opcode 0x0f 0x71. */
6888FNIEMOP_DEF(iemOp_Grp12)
6889{
6890 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6891 if (IEM_IS_MODRM_REG_MODE(bRm))
6892 /* register, register */
6893 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6894 + pVCpu->iem.s.idxPrefix], bRm);
6895 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6896}
6897
6898
6899/** Opcode 0x0f 0x72 11/2. */
6900FNIEMOPRM_DEF(iemOp_Grp13_psrld_Nq_Ib)
6901{
6902// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6903 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrld_imm_u64);
6904}
6905
6906
6907/** Opcode 0x66 0x0f 0x72 11/2. */
6908FNIEMOPRM_DEF(iemOp_Grp13_psrld_Ux_Ib)
6909{
6910// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6911 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrld_imm_u128);
6912}
6913
6914
6915/** Opcode 0x0f 0x72 11/4. */
6916FNIEMOPRM_DEF(iemOp_Grp13_psrad_Nq_Ib)
6917{
6918// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6919 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrad_imm_u64);
6920}
6921
6922
6923/** Opcode 0x66 0x0f 0x72 11/4. */
6924FNIEMOPRM_DEF(iemOp_Grp13_psrad_Ux_Ib)
6925{
6926// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6927 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrad_imm_u128);
6928}
6929
6930
6931/** Opcode 0x0f 0x72 11/6. */
6932FNIEMOPRM_DEF(iemOp_Grp13_pslld_Nq_Ib)
6933{
6934// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6935 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_pslld_imm_u64);
6936}
6937
6938/** Opcode 0x66 0x0f 0x72 11/6. */
6939FNIEMOPRM_DEF(iemOp_Grp13_pslld_Ux_Ib)
6940{
6941// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6942 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslld_imm_u128);
6943}
6944
6945
6946/**
6947 * Group 13 jump table for register variant.
6948 */
6949IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
6950{
6951 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6952 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6953 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6954 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6955 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6956 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6957 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6958 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6959};
6960AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);
6961
6962/** Opcode 0x0f 0x72. */
6963FNIEMOP_DEF(iemOp_Grp13)
6964{
6965 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6966 if (IEM_IS_MODRM_REG_MODE(bRm))
6967 /* register, register */
6968 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6969 + pVCpu->iem.s.idxPrefix], bRm);
6970 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6971}
6972
6973
6974/** Opcode 0x0f 0x73 11/2. */
6975FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Nq_Ib)
6976{
6977// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6978 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlq_imm_u64);
6979}
6980
6981
6982/** Opcode 0x66 0x0f 0x73 11/2. */
6983FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Ux_Ib)
6984{
6985// IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6986 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlq_imm_u128);
6987}
6988
6989
6990/** Opcode 0x66 0x0f 0x73 11/3. */
6991FNIEMOPRM_DEF(iemOp_Grp14_psrldq_Ux_Ib)
6992{
6993// IEMOP_MNEMONIC2(RI, PSRLDQ, psrldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6994 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrldq_imm_u128);
6995}
6996
6997
6998/** Opcode 0x0f 0x73 11/6. */
6999FNIEMOPRM_DEF(iemOp_Grp14_psllq_Nq_Ib)
7000{
7001// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
7002 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllq_imm_u64);
7003}
7004
7005
7006/** Opcode 0x66 0x0f 0x73 11/6. */
7007FNIEMOPRM_DEF(iemOp_Grp14_psllq_Ux_Ib)
7008{
7009// IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
7010 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllq_imm_u128);
7011}
7012
7013
7014/** Opcode 0x66 0x0f 0x73 11/7. */
7015FNIEMOPRM_DEF(iemOp_Grp14_pslldq_Ux_Ib)
7016{
7017// IEMOP_MNEMONIC2(RI, PSLLDQ, pslldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
7018 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslldq_imm_u128);
7019}
7020
7021/**
7022 * Group 14 jump table for register variant.
7023 */
7024IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
7025{
7026 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
7027 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
7028 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_psrlq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
7029 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
7030 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
7031 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
7032 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_psllq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
7033 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
7034};
7035AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
7036
7037
7038/** Opcode 0x0f 0x73. */
7039FNIEMOP_DEF(iemOp_Grp14)
7040{
7041 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7042 if (IEM_IS_MODRM_REG_MODE(bRm))
7043 /* register, register */
7044 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
7045 + pVCpu->iem.s.idxPrefix], bRm);
7046 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
7047}
7048
7049
7050/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
7051FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
7052{
7053 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7054 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqb_u64);
7055}
7056
7057
7058/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
7059FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
7060{
7061 IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7062 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqb_u128);
7063}
7064
7065
7066/* Opcode 0xf3 0x0f 0x74 - invalid */
7067/* Opcode 0xf2 0x0f 0x74 - invalid */
7068
7069
7070/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
7071FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
7072{
7073 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7074 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqw_u64);
7075}
7076
7077
7078/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
7079FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
7080{
7081 IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7082 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqw_u128);
7083}
7084
7085
7086/* Opcode 0xf3 0x0f 0x75 - invalid */
7087/* Opcode 0xf2 0x0f 0x75 - invalid */
7088
7089
7090/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
7091FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
7092{
7093 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7094 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqd_u64);
7095}
7096
7097
7098/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
7099FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
7100{
7101 IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
7102 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqd_u128);
7103}
7104
7105
7106/* Opcode 0xf3 0x0f 0x76 - invalid */
7107/* Opcode 0xf2 0x0f 0x76 - invalid */
7108
7109
7110/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
7111FNIEMOP_DEF(iemOp_emms)
7112{
7113 IEMOP_MNEMONIC(emms, "emms");
7114 IEM_MC_BEGIN(0, 0, 0, 0);
7115 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7116 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7117 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7118 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7119 IEM_MC_FPU_FROM_MMX_MODE();
7120 IEM_MC_ADVANCE_RIP_AND_FINISH();
7121 IEM_MC_END();
7122}
7123
7124/* Opcode 0x66 0x0f 0x77 - invalid */
7125/* Opcode 0xf3 0x0f 0x77 - invalid */
7126/* Opcode 0xf2 0x0f 0x77 - invalid */
7127
7128/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
7129#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
7130FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
7131{
7132 IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
7133 IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVDiag_Vmread);
7134 IEMOP_HLP_VMX_INSTR("vmread", kVmxVDiag_Vmread);
7135 IEMMODE const enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : IEMMODE_32BIT;
7136
7137 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7138 if (IEM_IS_MODRM_REG_MODE(bRm))
7139 {
7140 /*
7141 * Register, register.
7142 */
7143 if (enmEffOpSize == IEMMODE_64BIT)
7144 {
7145 IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
7146 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7147 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7148 IEM_MC_ARG(uint64_t, u64Enc, 1);
7149 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7150 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7151 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
7152 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
7153 iemCImpl_vmread_reg64, pu64Dst, u64Enc);
7154 IEM_MC_END();
7155 }
7156 else
7157 {
7158 IEM_MC_BEGIN(2, 0, 0, 0);
7159 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7160 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7161 IEM_MC_ARG(uint32_t, u32Enc, 1);
7162 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7163 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7164 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
7165 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
7166 iemCImpl_vmread_reg32, pu64Dst, u32Enc);
7167 IEM_MC_END();
7168 }
7169 }
7170 else
7171 {
7172 /*
7173 * Memory, register.
7174 */
7175 if (enmEffOpSize == IEMMODE_64BIT)
7176 {
7177 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
7178 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7179 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7180 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7181 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
7182 IEM_MC_ARG(uint64_t, u64Enc, 2);
7183 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7184 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
7185 iemCImpl_vmread_mem_reg64, iEffSeg, GCPtrVal, u64Enc);
7186 IEM_MC_END();
7187 }
7188 else
7189 {
7190 IEM_MC_BEGIN(3, 0, 0, 0);
7191 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7192 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7193 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7194 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
7195 IEM_MC_ARG(uint32_t, u32Enc, 2);
7196 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7197 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
7198 iemCImpl_vmread_mem_reg32, iEffSeg, GCPtrVal, u32Enc);
7199 IEM_MC_END();
7200 }
7201 }
7202}
7203#else
7204FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
7205#endif
7206
7207/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
7208FNIEMOP_STUB(iemOp_AmdGrp17);
7209/* Opcode 0xf3 0x0f 0x78 - invalid */
7210/* Opcode 0xf2 0x0f 0x78 - invalid */
7211
7212/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
7213#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
7214FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
7215{
7216 IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
7217 IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVDiag_Vmwrite);
7218 IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVDiag_Vmwrite);
7219 IEMMODE const enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : IEMMODE_32BIT;
7220
7221 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7222 if (IEM_IS_MODRM_REG_MODE(bRm))
7223 {
7224 /*
7225 * Register, register.
7226 */
7227 if (enmEffOpSize == IEMMODE_64BIT)
7228 {
7229 IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
7230 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7231 IEM_MC_ARG(uint64_t, u64Val, 0);
7232 IEM_MC_ARG(uint64_t, u64Enc, 1);
7233 IEM_MC_FETCH_GREG_U64(u64Val, IEM_GET_MODRM_RM(pVCpu, bRm));
7234 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7235 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmwrite_reg, u64Val, u64Enc);
7236 IEM_MC_END();
7237 }
7238 else
7239 {
7240 IEM_MC_BEGIN(2, 0, 0, 0);
7241 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7242 IEM_MC_ARG(uint32_t, u32Val, 0);
7243 IEM_MC_ARG(uint32_t, u32Enc, 1);
7244 IEM_MC_FETCH_GREG_U32(u32Val, IEM_GET_MODRM_RM(pVCpu, bRm));
7245 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7246 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmwrite_reg, u32Val, u32Enc);
7247 IEM_MC_END();
7248 }
7249 }
7250 else
7251 {
7252 /*
7253 * Register, memory.
7254 */
7255 if (enmEffOpSize == IEMMODE_64BIT)
7256 {
7257 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
7258 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7259 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7260 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7261 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
7262 IEM_MC_ARG(uint64_t, u64Enc, 2);
7263 IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7264 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
7265 iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u64Enc);
7266 IEM_MC_END();
7267 }
7268 else
7269 {
7270 IEM_MC_BEGIN(3, 0, 0, 0);
7271 IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
7272 IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
7273 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
7274 IEM_MC_ARG(uint32_t, u32Enc, 2);
7275 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
7276 IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
7277 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0,
7278 iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u32Enc);
7279 IEM_MC_END();
7280 }
7281 }
7282}
7283#else
7284FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
7285#endif
7286/* Opcode 0x66 0x0f 0x79 - invalid */
7287/* Opcode 0xf3 0x0f 0x79 - invalid */
7288/* Opcode 0xf2 0x0f 0x79 - invalid */
7289
7290/* Opcode 0x0f 0x7a - invalid */
7291/* Opcode 0x66 0x0f 0x7a - invalid */
7292/* Opcode 0xf3 0x0f 0x7a - invalid */
7293/* Opcode 0xf2 0x0f 0x7a - invalid */
7294
7295/* Opcode 0x0f 0x7b - invalid */
7296/* Opcode 0x66 0x0f 0x7b - invalid */
7297/* Opcode 0xf3 0x0f 0x7b - invalid */
7298/* Opcode 0xf2 0x0f 0x7b - invalid */
7299
7300/* Opcode 0x0f 0x7c - invalid */
7301
7302
7303/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
7304FNIEMOP_DEF(iemOp_haddpd_Vpd_Wpd)
7305{
7306 IEMOP_MNEMONIC2(RM, HADDPD, haddpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
7307 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddpd_u128);
7308}
7309
7310
7311/* Opcode 0xf3 0x0f 0x7c - invalid */
7312
7313
7314/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
7315FNIEMOP_DEF(iemOp_haddps_Vps_Wps)
7316{
7317 IEMOP_MNEMONIC2(RM, HADDPS, haddps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
7318 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddps_u128);
7319}
7320
7321
7322/* Opcode 0x0f 0x7d - invalid */
7323
7324
7325/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
7326FNIEMOP_DEF(iemOp_hsubpd_Vpd_Wpd)
7327{
7328 IEMOP_MNEMONIC2(RM, HSUBPD, hsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
7329 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubpd_u128);
7330}
7331
7332
7333/* Opcode 0xf3 0x0f 0x7d - invalid */
7334
7335
7336/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
7337FNIEMOP_DEF(iemOp_hsubps_Vps_Wps)
7338{
7339 IEMOP_MNEMONIC2(RM, HSUBPS, hsubps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
7340 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubps_u128);
7341}
7342
7343
7344/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
7345FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
7346{
7347 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7348 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7349 {
7350 /**
7351 * @opcode 0x7e
7352 * @opcodesub rex.w=1
7353 * @oppfx none
7354 * @opcpuid mmx
7355 * @opgroup og_mmx_datamove
7356 * @opxcpttype 5
7357 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
7358 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
7359 */
7360 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
7361 if (IEM_IS_MODRM_REG_MODE(bRm))
7362 {
7363 /* greg64, MMX */
7364 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
7365 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7366 IEM_MC_LOCAL(uint64_t, u64Tmp);
7367
7368 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7369 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7370 IEM_MC_FPU_TO_MMX_MODE();
7371
7372 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7373 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
7374
7375 IEM_MC_ADVANCE_RIP_AND_FINISH();
7376 IEM_MC_END();
7377 }
7378 else
7379 {
7380 /* [mem64], MMX */
7381 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
7382 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7383 IEM_MC_LOCAL(uint64_t, u64Tmp);
7384
7385 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7386 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7387 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7388 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7389 IEM_MC_FPU_TO_MMX_MODE();
7390
7391 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7392 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7393
7394 IEM_MC_ADVANCE_RIP_AND_FINISH();
7395 IEM_MC_END();
7396 }
7397 }
7398 else
7399 {
7400 /**
7401 * @opdone
7402 * @opcode 0x7e
7403 * @opcodesub rex.w=0
7404 * @oppfx none
7405 * @opcpuid mmx
7406 * @opgroup og_mmx_datamove
7407 * @opxcpttype 5
7408 * @opfunction iemOp_movd_q_Ey_Pd
7409 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
7410 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
7411 */
7412 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
7413 if (IEM_IS_MODRM_REG_MODE(bRm))
7414 {
7415 /* greg32, MMX */
7416 IEM_MC_BEGIN(0, 1, 0, 0);
7417 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7418 IEM_MC_LOCAL(uint32_t, u32Tmp);
7419
7420 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7421 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7422 IEM_MC_FPU_TO_MMX_MODE();
7423
7424 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
7425 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
7426
7427 IEM_MC_ADVANCE_RIP_AND_FINISH();
7428 IEM_MC_END();
7429 }
7430 else
7431 {
7432 /* [mem32], MMX */
7433 IEM_MC_BEGIN(0, 2, 0, 0);
7434 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7435 IEM_MC_LOCAL(uint32_t, u32Tmp);
7436
7437 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7438 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7439 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7440 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7441 IEM_MC_FPU_TO_MMX_MODE();
7442
7443 IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
7444 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
7445
7446 IEM_MC_ADVANCE_RIP_AND_FINISH();
7447 IEM_MC_END();
7448 }
7449 }
7450}
7451
7452
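/** Opcode 0x66 0x0f 0x7e - movd_q Ey, Vy */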
7453FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
7454{
7455 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7456 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
7457 {
7458 /**
7459 * @opcode 0x7e
7460 * @opcodesub rex.w=1
7461 * @oppfx 0x66
7462 * @opcpuid sse2
7463 * @opgroup og_sse2_simdint_datamove
7464 * @opxcpttype 5
7465 * @optest 64-bit / op1=1 op2=2 -> op1=2
7466 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
7467 */
7468 IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
7469 if (IEM_IS_MODRM_REG_MODE(bRm))
7470 {
7471 /* greg64, XMM */
7472 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
7473 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7474 IEM_MC_LOCAL(uint64_t, u64Tmp);
7475
7476 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7477 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7478
7479 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
7480 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
7481
7482 IEM_MC_ADVANCE_RIP_AND_FINISH();
7483 IEM_MC_END();
7484 }
7485 else
7486 {
7487 /* [mem64], XMM */
7488 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
7489 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7490 IEM_MC_LOCAL(uint64_t, u64Tmp);
7491
7492 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7493 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7494 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7495 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7496
7497 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
7498 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7499
7500 IEM_MC_ADVANCE_RIP_AND_FINISH();
7501 IEM_MC_END();
7502 }
7503 }
7504 else
7505 {
7506 /**
7507 * @opdone
7508 * @opcode 0x7e
7509 * @opcodesub rex.w=0
7510 * @oppfx 0x66
7511 * @opcpuid sse2
7512 * @opgroup og_sse2_simdint_datamove
7513 * @opxcpttype 5
7514 * @opfunction iemOp_movd_q_Ey_Vy
7515 * @optest op1=1 op2=2 -> op1=2
7516 * @optest op1=0 op2=-42 -> op1=-42
7517 */
7518 IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
7519 if (IEM_IS_MODRM_REG_MODE(bRm))
7520 {
7521 /* greg32, XMM */
7522 IEM_MC_BEGIN(0, 1, 0, 0);
7523 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7524 IEM_MC_LOCAL(uint32_t, u32Tmp);
7525
7526 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7527 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7528
7529 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
7530 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
7531
7532 IEM_MC_ADVANCE_RIP_AND_FINISH();
7533 IEM_MC_END();
7534 }
7535 else
7536 {
7537 /* [mem32], XMM */
7538 IEM_MC_BEGIN(0, 2, 0, 0);
7539 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7540 IEM_MC_LOCAL(uint32_t, u32Tmp);
7541
7542 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7543 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7544 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7545 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7546
7547 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
7548 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
7549
7550 IEM_MC_ADVANCE_RIP_AND_FINISH();
7551 IEM_MC_END();
7552 }
7553 }
7554}
7555
7556/**
7557 * @opcode 0x7e
7558 * @oppfx 0xf3
7559 * @opcpuid sse2
7560 * @opgroup og_sse2_pcksclr_datamove
7561 * @opxcpttype none
7562 * @optest op1=1 op2=2 -> op1=2
7563 * @optest op1=0 op2=-42 -> op1=-42
7564 */
7565FNIEMOP_DEF(iemOp_movq_Vq_Wq)
7566{
7567 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7568 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7569 if (IEM_IS_MODRM_REG_MODE(bRm))
7570 {
7571 /*
7572 * XMM128, XMM64.
7573 */
7574 IEM_MC_BEGIN(0, 2, 0, 0);
7575 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7576 IEM_MC_LOCAL(uint64_t, uSrc);
7577
7578 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7579 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7580
7581 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
7582 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7583
7584 IEM_MC_ADVANCE_RIP_AND_FINISH();
7585 IEM_MC_END();
7586 }
7587 else
7588 {
7589 /*
7590 * XMM128, [mem64].
7591 */
7592 IEM_MC_BEGIN(0, 2, 0, 0);
7593 IEM_MC_LOCAL(uint64_t, uSrc);
7594 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7595
7596 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7597 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7598 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7599 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7600
7601 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7602 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
7603
7604 IEM_MC_ADVANCE_RIP_AND_FINISH();
7605 IEM_MC_END();
7606 }
7607}
7608
7609/* Opcode 0xf2 0x0f 0x7e - invalid */
7610
7611
7612/** Opcode 0x0f 0x7f - movq Qq, Pq */
7613FNIEMOP_DEF(iemOp_movq_Qq_Pq)
7614{
7615 IEMOP_MNEMONIC2(MR, MOVQ, movq, Qq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_IGNORES_REXW);
7616 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7617 if (IEM_IS_MODRM_REG_MODE(bRm))
7618 {
7619 /*
7620 * MMX, MMX.
7621 */
7622 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
7623 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
7624 IEM_MC_BEGIN(0, 1, 0, 0);
7625 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7626 IEM_MC_LOCAL(uint64_t, u64Tmp);
7627 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7628 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7629 IEM_MC_FPU_TO_MMX_MODE();
7630
7631 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7632 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_RM_8(bRm), u64Tmp);
7633
7634 IEM_MC_ADVANCE_RIP_AND_FINISH();
7635 IEM_MC_END();
7636 }
7637 else
7638 {
7639 /*
7640 * [mem64], MMX.
7641 */
7642 IEM_MC_BEGIN(0, 2, 0, 0);
7643 IEM_MC_LOCAL(uint64_t, u64Tmp);
7644 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7645
7646 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7647 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
7648 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
7649 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7650 IEM_MC_FPU_TO_MMX_MODE();
7651
7652 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
7653 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
7654
7655 IEM_MC_ADVANCE_RIP_AND_FINISH();
7656 IEM_MC_END();
7657 }
7658}
7659
7660/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
7661FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
7662{
7663 IEMOP_MNEMONIC2(MR, MOVDQA, movdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7664 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7665 if (IEM_IS_MODRM_REG_MODE(bRm))
7666 {
7667 /*
7668 * XMM, XMM.
7669 */
7670 IEM_MC_BEGIN(0, 0, 0, 0);
7671 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7672 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7673 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7674 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
7675 IEM_GET_MODRM_REG(pVCpu, bRm));
7676 IEM_MC_ADVANCE_RIP_AND_FINISH();
7677 IEM_MC_END();
7678 }
7679 else
7680 {
7681 /*
7682 * [mem128], XMM.
7683 */
7684 IEM_MC_BEGIN(0, 2, 0, 0);
7685 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
7686 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7687
7688 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7689 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7690 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7691 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7692
7693 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7694 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
7695
7696 IEM_MC_ADVANCE_RIP_AND_FINISH();
7697 IEM_MC_END();
7698 }
7699}
7700
7701/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
7702FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
7703{
7704 IEMOP_MNEMONIC2(MR, MOVDQU, movdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
7705 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7706 if (IEM_IS_MODRM_REG_MODE(bRm))
7707 {
7708 /*
7709 * XMM, XMM.
7710 */
7711 IEM_MC_BEGIN(0, 0, 0, 0);
7712 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7713 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7714 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
7715 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
7716 IEM_GET_MODRM_REG(pVCpu, bRm));
7717 IEM_MC_ADVANCE_RIP_AND_FINISH();
7718 IEM_MC_END();
7719 }
7720 else
7721 {
7722 /*
7723 * [mem128], XMM.
7724 */
7725 IEM_MC_BEGIN(0, 2, 0, 0);
7726 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
7727 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7728
7729 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7730 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
7731 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
7732 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
7733
7734 IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
7735 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
7736
7737 IEM_MC_ADVANCE_RIP_AND_FINISH();
7738 IEM_MC_END();
7739 }
7740}
7741
7742/* Opcode 0xf2 0x0f 0x7f - invalid */
7743
7744
7745
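/*
 * Condition summary for the Jcc (0x0f 0x80..0x8f) and SETcc (0x0f 0x90..0x9f)
 * families below; each odd opcode inverts the even one's predicate:
 *
 *      o/no    OF=1                s/ns    SF=1
 *      c/nc    CF=1                p/np    PF=1
 *      e/ne    ZF=1                l/nl    SF != OF
 *      be/nbe  CF=1 || ZF=1        le/nle  ZF=1 || SF != OF
 */
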
7746/** Opcode 0x0f 0x80. */
7747FNIEMOP_DEF(iemOp_jo_Jv)
7748{
7749 IEMOP_MNEMONIC(jo_Jv, "jo Jv");
7750 IEMOP_HLP_MIN_386();
7751 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7752 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7753 {
7754 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7755 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7756 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7757 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7758 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7759 } IEM_MC_ELSE() {
7760 IEM_MC_ADVANCE_RIP_AND_FINISH();
7761 } IEM_MC_ENDIF();
7762 IEM_MC_END();
7763 }
7764 else
7765 {
7766 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7767 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7768 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7769 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7770 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7771 } IEM_MC_ELSE() {
7772 IEM_MC_ADVANCE_RIP_AND_FINISH();
7773 } IEM_MC_ENDIF();
7774 IEM_MC_END();
7775 }
7776}
7777
7778
7779/** Opcode 0x0f 0x81. */
7780FNIEMOP_DEF(iemOp_jno_Jv)
7781{
7782 IEMOP_MNEMONIC(jno_Jv, "jno Jv");
7783 IEMOP_HLP_MIN_386();
7784 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7785 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7786 {
7787 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7788 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7789 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7790 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7791 IEM_MC_ADVANCE_RIP_AND_FINISH();
7792 } IEM_MC_ELSE() {
7793 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7794 } IEM_MC_ENDIF();
7795 IEM_MC_END();
7796 }
7797 else
7798 {
7799 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7800 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7801 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7802 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
7803 IEM_MC_ADVANCE_RIP_AND_FINISH();
7804 } IEM_MC_ELSE() {
7805 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7806 } IEM_MC_ENDIF();
7807 IEM_MC_END();
7808 }
7809}
7810
7811
7812/** Opcode 0x0f 0x82. */
7813FNIEMOP_DEF(iemOp_jc_Jv)
7814{
7815 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
7816 IEMOP_HLP_MIN_386();
7817 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7818 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7819 {
7820 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7821 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7822 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7823 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7824 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7825 } IEM_MC_ELSE() {
7826 IEM_MC_ADVANCE_RIP_AND_FINISH();
7827 } IEM_MC_ENDIF();
7828 IEM_MC_END();
7829 }
7830 else
7831 {
7832 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7833 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7834 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7835 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7836 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7837 } IEM_MC_ELSE() {
7838 IEM_MC_ADVANCE_RIP_AND_FINISH();
7839 } IEM_MC_ENDIF();
7840 IEM_MC_END();
7841 }
7842}
7843
7844
7845/** Opcode 0x0f 0x83. */
7846FNIEMOP_DEF(iemOp_jnc_Jv)
7847{
7848 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
7849 IEMOP_HLP_MIN_386();
7850 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7851 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7852 {
7853 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7854 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7855 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7856 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7857 IEM_MC_ADVANCE_RIP_AND_FINISH();
7858 } IEM_MC_ELSE() {
7859 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7860 } IEM_MC_ENDIF();
7861 IEM_MC_END();
7862 }
7863 else
7864 {
7865 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7866 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7867 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7868 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7869 IEM_MC_ADVANCE_RIP_AND_FINISH();
7870 } IEM_MC_ELSE() {
7871 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7872 } IEM_MC_ENDIF();
7873 IEM_MC_END();
7874 }
7875}
7876
7877
7878/** Opcode 0x0f 0x84. */
7879FNIEMOP_DEF(iemOp_je_Jv)
7880{
7881 IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
7882 IEMOP_HLP_MIN_386();
7883 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7884 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7885 {
7886 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7887 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7888 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7889 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7890 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7891 } IEM_MC_ELSE() {
7892 IEM_MC_ADVANCE_RIP_AND_FINISH();
7893 } IEM_MC_ENDIF();
7894 IEM_MC_END();
7895 }
7896 else
7897 {
7898 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7899 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7900 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7901 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7902 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7903 } IEM_MC_ELSE() {
7904 IEM_MC_ADVANCE_RIP_AND_FINISH();
7905 } IEM_MC_ENDIF();
7906 IEM_MC_END();
7907 }
7908}
7909
7910
7911/** Opcode 0x0f 0x85. */
7912FNIEMOP_DEF(iemOp_jne_Jv)
7913{
7914 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
7915 IEMOP_HLP_MIN_386();
7916 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7917 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7918 {
7919 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7920 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7921 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7922 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7923 IEM_MC_ADVANCE_RIP_AND_FINISH();
7924 } IEM_MC_ELSE() {
7925 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7926 } IEM_MC_ENDIF();
7927 IEM_MC_END();
7928 }
7929 else
7930 {
7931 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7932 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7933 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7934 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
7935 IEM_MC_ADVANCE_RIP_AND_FINISH();
7936 } IEM_MC_ELSE() {
7937 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7938 } IEM_MC_ENDIF();
7939 IEM_MC_END();
7940 }
7941}
7942
7943
7944/** Opcode 0x0f 0x86. */
7945FNIEMOP_DEF(iemOp_jbe_Jv)
7946{
7947 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
7948 IEMOP_HLP_MIN_386();
7949 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7950 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7951 {
7952 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7953 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7954 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7955 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7956 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7957 } IEM_MC_ELSE() {
7958 IEM_MC_ADVANCE_RIP_AND_FINISH();
7959 } IEM_MC_ENDIF();
7960 IEM_MC_END();
7961 }
7962 else
7963 {
7964 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7965 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7966 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7967 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7968 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
7969 } IEM_MC_ELSE() {
7970 IEM_MC_ADVANCE_RIP_AND_FINISH();
7971 } IEM_MC_ENDIF();
7972 IEM_MC_END();
7973 }
7974}
7975
7976
7977/** Opcode 0x0f 0x87. */
7978FNIEMOP_DEF(iemOp_jnbe_Jv)
7979{
7980 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
7981 IEMOP_HLP_MIN_386();
7982 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7983 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
7984 {
7985 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7986 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
7987 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7988 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
7989 IEM_MC_ADVANCE_RIP_AND_FINISH();
7990 } IEM_MC_ELSE() {
7991 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
7992 } IEM_MC_ENDIF();
7993 IEM_MC_END();
7994 }
7995 else
7996 {
7997 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7998 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
7999 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8000 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8001 IEM_MC_ADVANCE_RIP_AND_FINISH();
8002 } IEM_MC_ELSE() {
8003 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8004 } IEM_MC_ENDIF();
8005 IEM_MC_END();
8006 }
8007}
8008
8009
8010/** Opcode 0x0f 0x88. */
8011FNIEMOP_DEF(iemOp_js_Jv)
8012{
8013 IEMOP_MNEMONIC(js_Jv, "js Jv");
8014 IEMOP_HLP_MIN_386();
8015 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8016 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8017 {
8018 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8019 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8020 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8021 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8022 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8023 } IEM_MC_ELSE() {
8024 IEM_MC_ADVANCE_RIP_AND_FINISH();
8025 } IEM_MC_ENDIF();
8026 IEM_MC_END();
8027 }
8028 else
8029 {
8030 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8031 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8032 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8033 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8034 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8035 } IEM_MC_ELSE() {
8036 IEM_MC_ADVANCE_RIP_AND_FINISH();
8037 } IEM_MC_ENDIF();
8038 IEM_MC_END();
8039 }
8040}
8041
8042
8043/** Opcode 0x0f 0x89. */
8044FNIEMOP_DEF(iemOp_jns_Jv)
8045{
8046 IEMOP_MNEMONIC(jns_Jv, "jns Jv");
8047 IEMOP_HLP_MIN_386();
8048 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8049 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8050 {
8051 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8052 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8053 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8054 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8055 IEM_MC_ADVANCE_RIP_AND_FINISH();
8056 } IEM_MC_ELSE() {
8057 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8058 } IEM_MC_ENDIF();
8059 IEM_MC_END();
8060 }
8061 else
8062 {
8063 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8064 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8065 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8066 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8067 IEM_MC_ADVANCE_RIP_AND_FINISH();
8068 } IEM_MC_ELSE() {
8069 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8070 } IEM_MC_ENDIF();
8071 IEM_MC_END();
8072 }
8073}
8074
8075
8076/** Opcode 0x0f 0x8a. */
8077FNIEMOP_DEF(iemOp_jp_Jv)
8078{
8079 IEMOP_MNEMONIC(jp_Jv, "jp Jv");
8080 IEMOP_HLP_MIN_386();
8081 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8082 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8083 {
8084 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8085 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8086 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8087 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8088 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8089 } IEM_MC_ELSE() {
8090 IEM_MC_ADVANCE_RIP_AND_FINISH();
8091 } IEM_MC_ENDIF();
8092 IEM_MC_END();
8093 }
8094 else
8095 {
8096 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8097 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8098 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8099 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8100 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8101 } IEM_MC_ELSE() {
8102 IEM_MC_ADVANCE_RIP_AND_FINISH();
8103 } IEM_MC_ENDIF();
8104 IEM_MC_END();
8105 }
8106}
8107
8108
8109/** Opcode 0x0f 0x8b. */
8110FNIEMOP_DEF(iemOp_jnp_Jv)
8111{
8112 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
8113 IEMOP_HLP_MIN_386();
8114 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8115 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8116 {
8117 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8118 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8119 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8120 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8121 IEM_MC_ADVANCE_RIP_AND_FINISH();
8122 } IEM_MC_ELSE() {
8123 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8124 } IEM_MC_ENDIF();
8125 IEM_MC_END();
8126 }
8127 else
8128 {
8129 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8130 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8131 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8132 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8133 IEM_MC_ADVANCE_RIP_AND_FINISH();
8134 } IEM_MC_ELSE() {
8135 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8136 } IEM_MC_ENDIF();
8137 IEM_MC_END();
8138 }
8139}
8140
8141
8142/** Opcode 0x0f 0x8c. */
8143FNIEMOP_DEF(iemOp_jl_Jv)
8144{
8145 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
8146 IEMOP_HLP_MIN_386();
8147 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8148 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8149 {
8150 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8151 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8152 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8153 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8154 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8155 } IEM_MC_ELSE() {
8156 IEM_MC_ADVANCE_RIP_AND_FINISH();
8157 } IEM_MC_ENDIF();
8158 IEM_MC_END();
8159 }
8160 else
8161 {
8162 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8163 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8164 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8165 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8166 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8167 } IEM_MC_ELSE() {
8168 IEM_MC_ADVANCE_RIP_AND_FINISH();
8169 } IEM_MC_ENDIF();
8170 IEM_MC_END();
8171 }
8172}
8173
8174
8175/** Opcode 0x0f 0x8d. */
8176FNIEMOP_DEF(iemOp_jnl_Jv)
8177{
8178 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
8179 IEMOP_HLP_MIN_386();
8180 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8181 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8182 {
8183 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8184 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8185 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8186 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8187 IEM_MC_ADVANCE_RIP_AND_FINISH();
8188 } IEM_MC_ELSE() {
8189 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8190 } IEM_MC_ENDIF();
8191 IEM_MC_END();
8192 }
8193 else
8194 {
8195 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8196 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8197 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8198 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8199 IEM_MC_ADVANCE_RIP_AND_FINISH();
8200 } IEM_MC_ELSE() {
8201 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8202 } IEM_MC_ENDIF();
8203 IEM_MC_END();
8204 }
8205}
8206
8207
8208/** Opcode 0x0f 0x8e. */
8209FNIEMOP_DEF(iemOp_jle_Jv)
8210{
8211 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
8212 IEMOP_HLP_MIN_386();
8213 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8214 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8215 {
8216 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8217 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8218 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8219 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8220 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8221 } IEM_MC_ELSE() {
8222 IEM_MC_ADVANCE_RIP_AND_FINISH();
8223 } IEM_MC_ENDIF();
8224 IEM_MC_END();
8225 }
8226 else
8227 {
8228 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8229 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8230 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8231 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8232 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8233 } IEM_MC_ELSE() {
8234 IEM_MC_ADVANCE_RIP_AND_FINISH();
8235 } IEM_MC_ENDIF();
8236 IEM_MC_END();
8237 }
8238}
8239
8240
8241/** Opcode 0x0f 0x8f. */
8242FNIEMOP_DEF(iemOp_jnle_Jv)
8243{
8244 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
8245 IEMOP_HLP_MIN_386();
8246 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8247 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
8248 {
8249 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8250 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
8251 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8252 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8253 IEM_MC_ADVANCE_RIP_AND_FINISH();
8254 } IEM_MC_ELSE() {
8255 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
8256 } IEM_MC_ENDIF();
8257 IEM_MC_END();
8258 }
8259 else
8260 {
8261 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8262 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
8263 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8264 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8265 IEM_MC_ADVANCE_RIP_AND_FINISH();
8266 } IEM_MC_ELSE() {
8267 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
8268 } IEM_MC_ENDIF();
8269 IEM_MC_END();
8270 }
8271}
8272
8273
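/*
 * Note: the SETcc handlers below evaluate the same predicates as the Jcc
 * family above but store a 1 or 0 byte to the r/m operand instead of
 * branching; only byte-sized forms exist, so there is no operand-size split.
 */
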
8274/** Opcode 0x0f 0x90. */
8275FNIEMOP_DEF(iemOp_seto_Eb)
8276{
8277 IEMOP_MNEMONIC(seto_Eb, "seto Eb");
8278 IEMOP_HLP_MIN_386();
8279 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8280
8281 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8282 * any way. AMD says it's "unused", whatever that means. We're
8283 * ignoring for now. */
8284 if (IEM_IS_MODRM_REG_MODE(bRm))
8285 {
8286 /* register target */
8287 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8288 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8289 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8290 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8291 } IEM_MC_ELSE() {
8292 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8293 } IEM_MC_ENDIF();
8294 IEM_MC_ADVANCE_RIP_AND_FINISH();
8295 IEM_MC_END();
8296 }
8297 else
8298 {
8299 /* memory target */
8300 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8301 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8302 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8303 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8304 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8305 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8306 } IEM_MC_ELSE() {
8307 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8308 } IEM_MC_ENDIF();
8309 IEM_MC_ADVANCE_RIP_AND_FINISH();
8310 IEM_MC_END();
8311 }
8312}
8313
8314
8315/** Opcode 0x0f 0x91. */
8316FNIEMOP_DEF(iemOp_setno_Eb)
8317{
8318 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
8319 IEMOP_HLP_MIN_386();
8320 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8321
8322 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8323 * any way. AMD says it's "unused", whatever that means. We're
8324 * ignoring for now. */
8325 if (IEM_IS_MODRM_REG_MODE(bRm))
8326 {
8327 /* register target */
8328 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8329 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8330 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8331 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8332 } IEM_MC_ELSE() {
8333 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8334 } IEM_MC_ENDIF();
8335 IEM_MC_ADVANCE_RIP_AND_FINISH();
8336 IEM_MC_END();
8337 }
8338 else
8339 {
8340 /* memory target */
8341 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8342 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8343 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8344 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8345 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8346 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8347 } IEM_MC_ELSE() {
8348 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8349 } IEM_MC_ENDIF();
8350 IEM_MC_ADVANCE_RIP_AND_FINISH();
8351 IEM_MC_END();
8352 }
8353}
8354
8355
8356/** Opcode 0x0f 0x92. */
8357FNIEMOP_DEF(iemOp_setc_Eb)
8358{
8359 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
8360 IEMOP_HLP_MIN_386();
8361 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8362
8363 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8364 * any way. AMD says it's "unused", whatever that means. We're
8365 * ignoring for now. */
8366 if (IEM_IS_MODRM_REG_MODE(bRm))
8367 {
8368 /* register target */
8369 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8370 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8371 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8372 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8373 } IEM_MC_ELSE() {
8374 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8375 } IEM_MC_ENDIF();
8376 IEM_MC_ADVANCE_RIP_AND_FINISH();
8377 IEM_MC_END();
8378 }
8379 else
8380 {
8381 /* memory target */
8382 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8383 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8384 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8385 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8386 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8387 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8388 } IEM_MC_ELSE() {
8389 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8390 } IEM_MC_ENDIF();
8391 IEM_MC_ADVANCE_RIP_AND_FINISH();
8392 IEM_MC_END();
8393 }
8394}
8395
8396
8397/** Opcode 0x0f 0x93. */
8398FNIEMOP_DEF(iemOp_setnc_Eb)
8399{
8400 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
8401 IEMOP_HLP_MIN_386();
8402 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8403
8404 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8405 * any way. AMD says it's "unused", whatever that means. We're
8406 * ignoring for now. */
8407 if (IEM_IS_MODRM_REG_MODE(bRm))
8408 {
8409 /* register target */
8410 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8411 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8412 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8413 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8414 } IEM_MC_ELSE() {
8415 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8416 } IEM_MC_ENDIF();
8417 IEM_MC_ADVANCE_RIP_AND_FINISH();
8418 IEM_MC_END();
8419 }
8420 else
8421 {
8422 /* memory target */
8423 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8424 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8425 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8426 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8427 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8428 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8429 } IEM_MC_ELSE() {
8430 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8431 } IEM_MC_ENDIF();
8432 IEM_MC_ADVANCE_RIP_AND_FINISH();
8433 IEM_MC_END();
8434 }
8435}
8436
8437
8438/** Opcode 0x0f 0x94. */
8439FNIEMOP_DEF(iemOp_sete_Eb)
8440{
8441 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
8442 IEMOP_HLP_MIN_386();
8443 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8444
8445 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8446 * any way. AMD says it's "unused", whatever that means. We're
8447 * ignoring for now. */
8448 if (IEM_IS_MODRM_REG_MODE(bRm))
8449 {
8450 /* register target */
8451 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8452 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8453 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8454 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8455 } IEM_MC_ELSE() {
8456 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8457 } IEM_MC_ENDIF();
8458 IEM_MC_ADVANCE_RIP_AND_FINISH();
8459 IEM_MC_END();
8460 }
8461 else
8462 {
8463 /* memory target */
8464 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8465 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8466 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8467 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8468 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8469 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8470 } IEM_MC_ELSE() {
8471 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8472 } IEM_MC_ENDIF();
8473 IEM_MC_ADVANCE_RIP_AND_FINISH();
8474 IEM_MC_END();
8475 }
8476}
8477
8478
8479/** Opcode 0x0f 0x95. */
8480FNIEMOP_DEF(iemOp_setne_Eb)
8481{
8482 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
8483 IEMOP_HLP_MIN_386();
8484 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8485
8486 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8487 * any way. AMD says it's "unused", whatever that means. We're
8488 * ignoring for now. */
8489 if (IEM_IS_MODRM_REG_MODE(bRm))
8490 {
8491 /* register target */
8492 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8493 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8494 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8495 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8496 } IEM_MC_ELSE() {
8497 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8498 } IEM_MC_ENDIF();
8499 IEM_MC_ADVANCE_RIP_AND_FINISH();
8500 IEM_MC_END();
8501 }
8502 else
8503 {
8504 /* memory target */
8505 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8506 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8507 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8508 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8509 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8510 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8511 } IEM_MC_ELSE() {
8512 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8513 } IEM_MC_ENDIF();
8514 IEM_MC_ADVANCE_RIP_AND_FINISH();
8515 IEM_MC_END();
8516 }
8517}
8518
8519
8520/** Opcode 0x0f 0x96. */
8521FNIEMOP_DEF(iemOp_setbe_Eb)
8522{
8523 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
8524 IEMOP_HLP_MIN_386();
8525 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8526
8527 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8528 * any way. AMD says it's "unused", whatever that means. We're
8529 * ignoring for now. */
8530 if (IEM_IS_MODRM_REG_MODE(bRm))
8531 {
8532 /* register target */
8533 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8534 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8535 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8536 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8537 } IEM_MC_ELSE() {
8538 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8539 } IEM_MC_ENDIF();
8540 IEM_MC_ADVANCE_RIP_AND_FINISH();
8541 IEM_MC_END();
8542 }
8543 else
8544 {
8545 /* memory target */
8546 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8547 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8548 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8549 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8550 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8551 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8552 } IEM_MC_ELSE() {
8553 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8554 } IEM_MC_ENDIF();
8555 IEM_MC_ADVANCE_RIP_AND_FINISH();
8556 IEM_MC_END();
8557 }
8558}
8559
8560
8561/** Opcode 0x0f 0x97. */
8562FNIEMOP_DEF(iemOp_setnbe_Eb)
8563{
8564 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
8565 IEMOP_HLP_MIN_386();
8566 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8567
8568 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8569 * any way. AMD says it's "unused", whatever that means. We're
8570 * ignoring for now. */
8571 if (IEM_IS_MODRM_REG_MODE(bRm))
8572 {
8573 /* register target */
8574 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8575 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8576 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8577 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8578 } IEM_MC_ELSE() {
8579 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8580 } IEM_MC_ENDIF();
8581 IEM_MC_ADVANCE_RIP_AND_FINISH();
8582 IEM_MC_END();
8583 }
8584 else
8585 {
8586 /* memory target */
8587 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8588 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8589 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8590 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8591 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8592 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8593 } IEM_MC_ELSE() {
8594 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8595 } IEM_MC_ENDIF();
8596 IEM_MC_ADVANCE_RIP_AND_FINISH();
8597 IEM_MC_END();
8598 }
8599}
8600
8601
8602/** Opcode 0x0f 0x98. */
8603FNIEMOP_DEF(iemOp_sets_Eb)
8604{
8605 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
8606 IEMOP_HLP_MIN_386();
8607 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8608
8609 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8610 * any way. AMD says it's "unused", whatever that means. We're
8611 * ignoring for now. */
8612 if (IEM_IS_MODRM_REG_MODE(bRm))
8613 {
8614 /* register target */
8615 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8616 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8617 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8618 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8619 } IEM_MC_ELSE() {
8620 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8621 } IEM_MC_ENDIF();
8622 IEM_MC_ADVANCE_RIP_AND_FINISH();
8623 IEM_MC_END();
8624 }
8625 else
8626 {
8627 /* memory target */
8628 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8629 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8630 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8631 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8632 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8633 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8634 } IEM_MC_ELSE() {
8635 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8636 } IEM_MC_ENDIF();
8637 IEM_MC_ADVANCE_RIP_AND_FINISH();
8638 IEM_MC_END();
8639 }
8640}
8641
8642
8643/** Opcode 0x0f 0x99. */
8644FNIEMOP_DEF(iemOp_setns_Eb)
8645{
8646 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
8647 IEMOP_HLP_MIN_386();
8648 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8649
8650 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8651 * any way. AMD says it's "unused", whatever that means. We're
8652 * ignoring for now. */
8653 if (IEM_IS_MODRM_REG_MODE(bRm))
8654 {
8655 /* register target */
8656 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8657 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8658 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8659 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8660 } IEM_MC_ELSE() {
8661 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8662 } IEM_MC_ENDIF();
8663 IEM_MC_ADVANCE_RIP_AND_FINISH();
8664 IEM_MC_END();
8665 }
8666 else
8667 {
8668 /* memory target */
8669 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8670 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8671 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8672 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8673 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8674 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8675 } IEM_MC_ELSE() {
8676 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8677 } IEM_MC_ENDIF();
8678 IEM_MC_ADVANCE_RIP_AND_FINISH();
8679 IEM_MC_END();
8680 }
8681}
8682
8683
8684/** Opcode 0x0f 0x9a. */
8685FNIEMOP_DEF(iemOp_setp_Eb)
8686{
8687 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
8688 IEMOP_HLP_MIN_386();
8689 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8690
8691 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8692 * any way. AMD says it's "unused", whatever that means. We're
8693 * ignoring it for now. */
8694 if (IEM_IS_MODRM_REG_MODE(bRm))
8695 {
8696 /* register target */
8697 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8698 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8699 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8700 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8701 } IEM_MC_ELSE() {
8702 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8703 } IEM_MC_ENDIF();
8704 IEM_MC_ADVANCE_RIP_AND_FINISH();
8705 IEM_MC_END();
8706 }
8707 else
8708 {
8709 /* memory target */
8710 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8711 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8712 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8714 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8715 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8716 } IEM_MC_ELSE() {
8717 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8718 } IEM_MC_ENDIF();
8719 IEM_MC_ADVANCE_RIP_AND_FINISH();
8720 IEM_MC_END();
8721 }
8722}
8723
8724
8725/** Opcode 0x0f 0x9b. */
8726FNIEMOP_DEF(iemOp_setnp_Eb)
8727{
8728 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
8729 IEMOP_HLP_MIN_386();
8730 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8731
8732 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8733 * any way. AMD says it's "unused", whatever that means. We're
8734 * ignoring it for now. */
8735 if (IEM_IS_MODRM_REG_MODE(bRm))
8736 {
8737 /* register target */
8738 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8739 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8740 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8741 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8742 } IEM_MC_ELSE() {
8743 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8744 } IEM_MC_ENDIF();
8745 IEM_MC_ADVANCE_RIP_AND_FINISH();
8746 IEM_MC_END();
8747 }
8748 else
8749 {
8750 /* memory target */
8751 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8752 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8753 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8754 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8755 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8756 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8757 } IEM_MC_ELSE() {
8758 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8759 } IEM_MC_ENDIF();
8760 IEM_MC_ADVANCE_RIP_AND_FINISH();
8761 IEM_MC_END();
8762 }
8763}
8764
8765
8766/** Opcode 0x0f 0x9c. */
8767FNIEMOP_DEF(iemOp_setl_Eb)
8768{
8769 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
8770 IEMOP_HLP_MIN_386();
8771 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8772
8773 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8774 * any way. AMD says it's "unused", whatever that means. We're
8775 * ignoring it for now. */
8776 if (IEM_IS_MODRM_REG_MODE(bRm))
8777 {
8778 /* register target */
8779 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8780 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8781 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8782 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8783 } IEM_MC_ELSE() {
8784 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8785 } IEM_MC_ENDIF();
8786 IEM_MC_ADVANCE_RIP_AND_FINISH();
8787 IEM_MC_END();
8788 }
8789 else
8790 {
8791 /* memory target */
8792 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8793 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8794 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8795 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8796 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8797 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8798 } IEM_MC_ELSE() {
8799 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8800 } IEM_MC_ENDIF();
8801 IEM_MC_ADVANCE_RIP_AND_FINISH();
8802 IEM_MC_END();
8803 }
8804}
8805
8806
8807/** Opcode 0x0f 0x9d. */
8808FNIEMOP_DEF(iemOp_setnl_Eb)
8809{
8810 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
8811 IEMOP_HLP_MIN_386();
8812 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8813
8814 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8815 * any way. AMD says it's "unused", whatever that means. We're
8816 * ignoring it for now. */
8817 if (IEM_IS_MODRM_REG_MODE(bRm))
8818 {
8819 /* register target */
8820 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8821 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8822 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8823 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8824 } IEM_MC_ELSE() {
8825 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8826 } IEM_MC_ENDIF();
8827 IEM_MC_ADVANCE_RIP_AND_FINISH();
8828 IEM_MC_END();
8829 }
8830 else
8831 {
8832 /* memory target */
8833 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8834 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8835 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8836 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8837 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8838 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8839 } IEM_MC_ELSE() {
8840 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8841 } IEM_MC_ENDIF();
8842 IEM_MC_ADVANCE_RIP_AND_FINISH();
8843 IEM_MC_END();
8844 }
8845}
8846
8847
8848/** Opcode 0x0f 0x9e. */
8849FNIEMOP_DEF(iemOp_setle_Eb)
8850{
8851 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
8852 IEMOP_HLP_MIN_386();
8853 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8854
8855 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8856 * any way. AMD says it's "unused", whatever that means. We're
8857 * ignoring it for now. */
8858 if (IEM_IS_MODRM_REG_MODE(bRm))
8859 {
8860 /* register target */
8861 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8862 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8863 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8864 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8865 } IEM_MC_ELSE() {
8866 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8867 } IEM_MC_ENDIF();
8868 IEM_MC_ADVANCE_RIP_AND_FINISH();
8869 IEM_MC_END();
8870 }
8871 else
8872 {
8873 /* memory target */
8874 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8875 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8876 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8877 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8878 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8879 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8880 } IEM_MC_ELSE() {
8881 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8882 } IEM_MC_ENDIF();
8883 IEM_MC_ADVANCE_RIP_AND_FINISH();
8884 IEM_MC_END();
8885 }
8886}
8887
8888
8889/** Opcode 0x0f 0x9f. */
8890FNIEMOP_DEF(iemOp_setnle_Eb)
8891{
8892 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
8893 IEMOP_HLP_MIN_386();
8894 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8895
8896 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8897 * any way. AMD says it's "unused", whatever that means. We're
8898 * ignoring it for now. */
8899 if (IEM_IS_MODRM_REG_MODE(bRm))
8900 {
8901 /* register target */
8902 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8903 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8904 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8905 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8906 } IEM_MC_ELSE() {
8907 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8908 } IEM_MC_ENDIF();
8909 IEM_MC_ADVANCE_RIP_AND_FINISH();
8910 IEM_MC_END();
8911 }
8912 else
8913 {
8914 /* memory target */
8915 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8916 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8917 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8918 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8919 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8920 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8921 } IEM_MC_ELSE() {
8922 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8923 } IEM_MC_ENDIF();
8924 IEM_MC_ADVANCE_RIP_AND_FINISH();
8925 IEM_MC_END();
8926 }
8927}
8928
8929
8930/** Opcode 0x0f 0xa0. */
8931FNIEMOP_DEF(iemOp_push_fs)
8932{
8933 IEMOP_MNEMONIC(push_fs, "push fs");
8934 IEMOP_HLP_MIN_386();
8935 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8936 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
8937}
8938
8939
8940/** Opcode 0x0f 0xa1. */
8941FNIEMOP_DEF(iemOp_pop_fs)
8942{
8943 IEMOP_MNEMONIC(pop_fs, "pop fs");
8944 IEMOP_HLP_MIN_386();
8945 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8946 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
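    /* The register mask below tells the recompiler which guest registers the
       C implementation may dirty: RSP plus the FS selector, base, limit and
       attribute shadow copies. */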
8947 IEM_MC_DEFER_TO_CIMPL_2_RET(0,
8948 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
8949 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
8950 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
8951 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
8952 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS),
8953 iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
8954}
8955
8956
8957/** Opcode 0x0f 0xa2. */
8958FNIEMOP_DEF(iemOp_cpuid)
8959{
8960 IEMOP_MNEMONIC(cpuid, "cpuid");
8961 IEMOP_HLP_MIN_486(); /* not all 486s have CPUID. */
8962 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8963 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT,
8964 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
8965 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
8966 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
8967 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX),
8968 iemCImpl_cpuid);
8969}
8970
8971
8972/**
8973 * Body for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
8974 * iemOp_bts_Ev_Gv.
8975 */
8976
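/* For a memory operand the bit offset in Gv is signed and may address bits
   outside the unit at the effective address.  The macros below therefore
   split it: the high bits (arithmetic shift right by 4/5/6) select which
   16/32/64-bit unit to access, scaled to a byte displacement by a left shift
   of 1/2/3 and added to the effective address, while the low 4/5/6 bits pick
   the bit within that unit.  Worked example for 'bt word [mem], reg' with
   reg = -1: the displacement is (-1 >> 4) << 1 = -2 bytes and the bit index
   is -1 & 15 = 15, i.e. the top bit of the word just below the effective
   address. */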
8977#define IEMOP_BODY_BIT_Ev_Gv_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
8978 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
8979 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
8980 \
8981 if (IEM_IS_MODRM_REG_MODE(bRm)) \
8982 { \
8983 /* register destination. */ \
8984 switch (pVCpu->iem.s.enmEffOpSize) \
8985 { \
8986 case IEMMODE_16BIT: \
8987 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
8988 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8989 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
8990 IEM_MC_ARG(uint16_t, u16Src, 1); \
8991 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
8992 \
8993 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8994 IEM_MC_AND_LOCAL_U16(u16Src, 0xf); \
8995 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8996 IEM_MC_REF_EFLAGS(pEFlags); \
8997 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
8998 \
8999 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9000 IEM_MC_END(); \
9001 break; \
9002 \
9003 case IEMMODE_32BIT: \
9004 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
9005 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9006 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9007 IEM_MC_ARG(uint32_t, u32Src, 1); \
9008 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9009 \
9010 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9011 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f); \
9012 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9013 IEM_MC_REF_EFLAGS(pEFlags); \
9014 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
9015 \
9016 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
9017 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9018 IEM_MC_END(); \
9019 break; \
9020 \
9021 case IEMMODE_64BIT: \
9022 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
9023 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9024 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9025 IEM_MC_ARG(uint64_t, u64Src, 1); \
9026 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9027 \
9028 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9029 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f); \
9030 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9031 IEM_MC_REF_EFLAGS(pEFlags); \
9032 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
9033 \
9034 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9035 IEM_MC_END(); \
9036 break; \
9037 \
9038 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9039 } \
9040 } \
9041 else \
9042 { \
9043 /* memory destination. */ \
9044 /** @todo test negative bit offsets! */ \
9045 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
9046 { \
9047 switch (pVCpu->iem.s.enmEffOpSize) \
9048 { \
9049 case IEMMODE_16BIT: \
9050 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_386, 0); \
9051 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9052 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9053 IEMOP_HLP_DONE_DECODING(); \
9054 \
9055 IEM_MC_ARG(uint16_t, u16Src, 1); \
9056 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9057 IEM_MC_LOCAL_ASSIGN(int16_t, i16AddrAdj, /*=*/ u16Src); \
9058 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
9059 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
9060 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
9061 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
9062 \
9063 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9064 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9065 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9066 \
9067 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9068 IEM_MC_FETCH_EFLAGS(EFlags); \
9069 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
9070 \
9071 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9072 IEM_MC_COMMIT_EFLAGS(EFlags); \
9073 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9074 IEM_MC_END(); \
9075 break; \
9076 \
9077 case IEMMODE_32BIT: \
9078 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_386, 0); \
9079 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9080 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9081 IEMOP_HLP_DONE_DECODING(); \
9082 \
9083 IEM_MC_ARG(uint32_t, u32Src, 1); \
9084 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9085 IEM_MC_LOCAL_ASSIGN(int32_t, i32AddrAdj, /*=*/ u32Src); \
9086 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
9087 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
9088 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
9089 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
9090 \
9091 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9092 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9093 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9094 \
9095 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9096 IEM_MC_FETCH_EFLAGS(EFlags); \
9097 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
9098 \
9099 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9100 IEM_MC_COMMIT_EFLAGS(EFlags); \
9101 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9102 IEM_MC_END(); \
9103 break; \
9104 \
9105 case IEMMODE_64BIT: \
9106 IEM_MC_BEGIN(3, 5, IEM_MC_F_64BIT, 0); \
9107 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9108 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9109 IEMOP_HLP_DONE_DECODING(); \
9110 \
9111 IEM_MC_ARG(uint64_t, u64Src, 1); \
9112 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9113 IEM_MC_LOCAL_ASSIGN(int64_t, i64AddrAdj, /*=*/ u64Src); \
9114 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9115 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9116 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9117 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9118 \
9119 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9120 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9121 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9122 \
9123 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9124 IEM_MC_FETCH_EFLAGS(EFlags); \
9125 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
9126 \
9127 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9128 IEM_MC_COMMIT_EFLAGS(EFlags); \
9129 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9130 IEM_MC_END(); \
9131 break; \
9132 \
9133 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9134 } \
9135 } \
9136 else \
9137 { \
9138 (void)0
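/* Note that IEMOP_BODY_BIT_Ev_Gv_RW above deliberately ends inside the else
   branch taken for locked accesses; IEMOP_BODY_BIT_Ev_Gv_LOCKED below supplies
   the remainder of that branch, so the two must always be instantiated back to
   back (see iemOp_bts_Ev_Gv and friends). */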
9139/* Separate macro to work around parsing issue in IEMAllInstPython.py */
9140#define IEMOP_BODY_BIT_Ev_Gv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
9141 switch (pVCpu->iem.s.enmEffOpSize) \
9142 { \
9143 case IEMMODE_16BIT: \
9144 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_386, 0); \
9145 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9146 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9147 IEMOP_HLP_DONE_DECODING(); \
9148 \
9149 IEM_MC_ARG(uint16_t, u16Src, 1); \
9150 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9151 IEM_MC_LOCAL_ASSIGN(int16_t, i16AddrAdj, /*=*/ u16Src); \
9152 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
9153 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
9154 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
9155 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
9156 \
9157 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9158 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9159 IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9160 \
9161 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9162 IEM_MC_FETCH_EFLAGS(EFlags); \
9163 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
9164 \
9165 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
9166 IEM_MC_COMMIT_EFLAGS(EFlags); \
9167 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9168 IEM_MC_END(); \
9169 break; \
9170 \
9171 case IEMMODE_32BIT: \
9172 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_386, 0); \
9173 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9174 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9175 IEMOP_HLP_DONE_DECODING(); \
9176 \
9177 IEM_MC_ARG(uint32_t, u32Src, 1); \
9178 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9179 IEM_MC_LOCAL_ASSIGN(int32_t, i32AddrAdj, /*=*/ u32Src); \
9180 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
9181 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
9182 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
9183 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
9184 \
9185 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9186 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9187 IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9188 \
9189 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9190 IEM_MC_FETCH_EFLAGS(EFlags); \
9191 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
9192 \
9193 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
9194 IEM_MC_COMMIT_EFLAGS(EFlags); \
9195 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9196 IEM_MC_END(); \
9197 break; \
9198 \
9199 case IEMMODE_64BIT: \
9200 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0); \
9201 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9202 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9203 IEMOP_HLP_DONE_DECODING(); \
9204 \
9205 IEM_MC_ARG(uint64_t, u64Src, 1); \
9206 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9207 IEM_MC_LOCAL_ASSIGN(int64_t, i64AddrAdj, /*=*/ u64Src); \
9208 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9209 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9210 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9211 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9212 \
9213 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9214 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9215 IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9216 \
9217 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9218 IEM_MC_FETCH_EFLAGS(EFlags); \
9219 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
9220 \
9221 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
9222 IEM_MC_COMMIT_EFLAGS(EFlags); \
9223 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9224 IEM_MC_END(); \
9225 break; \
9226 \
9227 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9228 } \
9229 } \
9230 } \
9231 (void)0
9232
9233/* Read-only version (bt): the operand is never written, so a LOCK prefix is rejected with #UD. */
9234#define IEMOP_BODY_BIT_Ev_Gv_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
9235 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
9236 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
9237 \
9238 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9239 { \
9240 /* register destination. */ \
9241 switch (pVCpu->iem.s.enmEffOpSize) \
9242 { \
9243 case IEMMODE_16BIT: \
9244 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
9245 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9246 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
9247 IEM_MC_ARG(uint16_t, u16Src, 1); \
9248 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9249 \
9250 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9251 IEM_MC_AND_LOCAL_U16(u16Src, 0xf); \
9252 IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9253 IEM_MC_REF_EFLAGS(pEFlags); \
9254 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
9255 \
9256 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9257 IEM_MC_END(); \
9258 break; \
9259 \
9260 case IEMMODE_32BIT: \
9261 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
9262 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9263 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
9264 IEM_MC_ARG(uint32_t, u32Src, 1); \
9265 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9266 \
9267 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9268 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f); \
9269 IEM_MC_REF_GREG_U32_CONST(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9270 IEM_MC_REF_EFLAGS(pEFlags); \
9271 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
9272 \
9273 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9274 IEM_MC_END(); \
9275 break; \
9276 \
9277 case IEMMODE_64BIT: \
9278 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
9279 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9280 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
9281 IEM_MC_ARG(uint64_t, u64Src, 1); \
9282 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9283 \
9284 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9285 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f); \
9286 IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9287 IEM_MC_REF_EFLAGS(pEFlags); \
9288 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
9289 \
9290 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9291 IEM_MC_END(); \
9292 break; \
9293 \
9294 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9295 } \
9296 } \
9297 else \
9298 { \
9299 /* memory destination. */ \
9300 /** @todo test negative bit offsets! */ \
9301 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
9302 { \
9303 switch (pVCpu->iem.s.enmEffOpSize) \
9304 { \
9305 case IEMMODE_16BIT: \
9306 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_386, 0); \
9307 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9308 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9309 IEMOP_HLP_DONE_DECODING(); \
9310 \
9311 IEM_MC_ARG(uint16_t, u16Src, 1); \
9312 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9313 IEM_MC_LOCAL_ASSIGN(int16_t, i16AddrAdj, /*=*/ u16Src); \
9314 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
9315 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
9316 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
9317 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
9318 \
9319 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9320 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
9321 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9322 \
9323 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9324 IEM_MC_FETCH_EFLAGS(EFlags); \
9325 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
9326 \
9327 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
9328 IEM_MC_COMMIT_EFLAGS(EFlags); \
9329 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9330 IEM_MC_END(); \
9331 break; \
9332 \
9333 case IEMMODE_32BIT: \
9334 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_386, 0); \
9335 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9336 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9337 IEMOP_HLP_DONE_DECODING(); \
9338 \
9339 IEM_MC_ARG(uint32_t, u32Src, 1); \
9340 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9341 IEM_MC_LOCAL_ASSIGN(int32_t, i32AddrAdj, /*=*/ u32Src); \
9342 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
9343 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
9344 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
9345 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
9346 \
9347 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
9348 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9349 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9350 \
9351 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9352 IEM_MC_FETCH_EFLAGS(EFlags); \
9353 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
9354 \
9355 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
9356 IEM_MC_COMMIT_EFLAGS(EFlags); \
9357 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9358 IEM_MC_END(); \
9359 break; \
9360 \
9361 case IEMMODE_64BIT: \
9362 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0); \
9363 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9364 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9365 IEMOP_HLP_DONE_DECODING(); \
9366 \
9367 IEM_MC_ARG(uint64_t, u64Src, 1); \
9368 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9369 IEM_MC_LOCAL_ASSIGN(int64_t, i64AddrAdj, /*=*/ u64Src); \
9370 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9371 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9372 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9373 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9374 \
9375 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9376 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
9377 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9378 \
9379 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9380 IEM_MC_FETCH_EFLAGS(EFlags); \
9381 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
9382 \
9383 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
9384 IEM_MC_COMMIT_EFLAGS(EFlags); \
9385 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9386 IEM_MC_END(); \
9387 break; \
9388 \
9389 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9390 } \
9391 } \
9392 else \
9393 { \
9394 IEMOP_HLP_DONE_DECODING(); \
9395 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
9396 } \
9397 } \
9398 (void)0
9399
9400
9401/** Opcode 0x0f 0xa3. */
9402FNIEMOP_DEF(iemOp_bt_Ev_Gv)
9403{
9404 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
9405 IEMOP_HLP_MIN_386();
9406 IEMOP_BODY_BIT_Ev_Gv_RO(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64);
9407}
9408
9409
9410/**
9411 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
9412 */
9413FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
9414{
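    /* Both double precision shifts move bits from the Gv source into the bits
       vacated in the Ev destination; only the destination is written.  Real
       hardware takes the shift count modulo 32 (modulo 64 for 64-bit
       operands); no masking is done here, so it is presumably left to the
       iemAImpl shift workers. */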
9415 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9416 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
9417
9418 if (IEM_IS_MODRM_REG_MODE(bRm))
9419 {
9420 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9421
9422 switch (pVCpu->iem.s.enmEffOpSize)
9423 {
9424 case IEMMODE_16BIT:
9425 IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_386, 0);
9426 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9427 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9428 IEM_MC_ARG(uint16_t, u16Src, 1);
9429 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
9430 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9431
9432 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9433 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9434 IEM_MC_REF_EFLAGS(pEFlags);
9435 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9436
9437 IEM_MC_ADVANCE_RIP_AND_FINISH();
9438 IEM_MC_END();
9439 break;
9440
9441 case IEMMODE_32BIT:
9442 IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_386, 0);
9443 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9444 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9445 IEM_MC_ARG(uint32_t, u32Src, 1);
9446 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
9447 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9448
9449 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9450 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9451 IEM_MC_REF_EFLAGS(pEFlags);
9452 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9453
9454 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
9455 IEM_MC_ADVANCE_RIP_AND_FINISH();
9456 IEM_MC_END();
9457 break;
9458
9459 case IEMMODE_64BIT:
9460 IEM_MC_BEGIN(4, 0, IEM_MC_F_64BIT, 0);
9461 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9462 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9463 IEM_MC_ARG(uint64_t, u64Src, 1);
9464 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
9465 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9466
9467 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9468 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9469 IEM_MC_REF_EFLAGS(pEFlags);
9470 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9471
9472 IEM_MC_ADVANCE_RIP_AND_FINISH();
9473 IEM_MC_END();
9474 break;
9475
9476 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9477 }
9478 }
9479 else
9480 {
9481 switch (pVCpu->iem.s.enmEffOpSize)
9482 {
9483 case IEMMODE_16BIT:
9484 IEM_MC_BEGIN(4, 3, IEM_MC_F_MIN_386, 0);
9485 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9486 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9487
9488 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9489 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9490
9491 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9492 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9493 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9494
9495 IEM_MC_ARG(uint16_t, u16Src, 1);
9496 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9497 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ cShift, 2);
9498 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9499 IEM_MC_FETCH_EFLAGS(EFlags);
9500 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9501
9502 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
9503 IEM_MC_COMMIT_EFLAGS(EFlags);
9504 IEM_MC_ADVANCE_RIP_AND_FINISH();
9505 IEM_MC_END();
9506 break;
9507
9508 case IEMMODE_32BIT:
9509 IEM_MC_BEGIN(4, 3, IEM_MC_F_MIN_386, 0);
9510 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9511 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9512
9513 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9514 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9515
9516 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9517 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9518 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9519
9520 IEM_MC_ARG(uint32_t, u32Src, 1);
9521 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9522 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ cShift, 2);
9523 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9524 IEM_MC_FETCH_EFLAGS(EFlags);
9525 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9526
9527 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
9528 IEM_MC_COMMIT_EFLAGS(EFlags);
9529 IEM_MC_ADVANCE_RIP_AND_FINISH();
9530 IEM_MC_END();
9531 break;
9532
9533 case IEMMODE_64BIT:
9534 IEM_MC_BEGIN(4, 3, IEM_MC_F_64BIT, 0);
9535 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9536 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9537
9538 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9539 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9540
9541 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9542 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9543 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9544
9545 IEM_MC_ARG(uint64_t, u64Src, 1);
9546 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9547 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ cShift, 2);
9548 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9549 IEM_MC_FETCH_EFLAGS(EFlags);
9551 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9552
9553 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
9554 IEM_MC_COMMIT_EFLAGS(EFlags);
9555 IEM_MC_ADVANCE_RIP_AND_FINISH();
9556 IEM_MC_END();
9557 break;
9558
9559 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9560 }
9561 }
9562}
9563
9564
9565/**
9566 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
9567 */
9568FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
9569{
9570 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9571 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
9572
9573 if (IEM_IS_MODRM_REG_MODE(bRm))
9574 {
9575 switch (pVCpu->iem.s.enmEffOpSize)
9576 {
9577 case IEMMODE_16BIT:
9578 IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_386, 0);
9579 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9580 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9581 IEM_MC_ARG(uint16_t, u16Src, 1);
9582 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9583 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9584
9585 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9586 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9587 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9588 IEM_MC_REF_EFLAGS(pEFlags);
9589 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9590
9591 IEM_MC_ADVANCE_RIP_AND_FINISH();
9592 IEM_MC_END();
9593 break;
9594
9595 case IEMMODE_32BIT:
9596 IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_386, 0);
9597 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9598 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9599 IEM_MC_ARG(uint32_t, u32Src, 1);
9600 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9601 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9602
9603 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9604 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9605 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9606 IEM_MC_REF_EFLAGS(pEFlags);
9607 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9608
9609 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
9610 IEM_MC_ADVANCE_RIP_AND_FINISH();
9611 IEM_MC_END();
9612 break;
9613
9614 case IEMMODE_64BIT:
9615 IEM_MC_BEGIN(4, 0, IEM_MC_F_64BIT, 0);
9616 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9617 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9618 IEM_MC_ARG(uint64_t, u64Src, 1);
9619 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9620 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9621
9622 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9623 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9624 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9625 IEM_MC_REF_EFLAGS(pEFlags);
9626 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9627
9628 IEM_MC_ADVANCE_RIP_AND_FINISH();
9629 IEM_MC_END();
9630 break;
9631
9632 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9633 }
9634 }
9635 else
9636 {
9637 switch (pVCpu->iem.s.enmEffOpSize)
9638 {
9639 case IEMMODE_16BIT:
9640 IEM_MC_BEGIN(4, 3, IEM_MC_F_MIN_386, 0);
9641 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9642 IEM_MC_ARG(uint16_t, u16Src, 1);
9643 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9644 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9645 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9646 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9647
9648 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9649 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9650 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9651 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9652 IEM_MC_FETCH_EFLAGS(EFlags);
9653 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9654 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9655
9656 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
9657 IEM_MC_COMMIT_EFLAGS(EFlags);
9658 IEM_MC_ADVANCE_RIP_AND_FINISH();
9659 IEM_MC_END();
9660 break;
9661
9662 case IEMMODE_32BIT:
9663 IEM_MC_BEGIN(4, 3, IEM_MC_F_MIN_386, 0);
9664 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9665 IEM_MC_ARG(uint32_t, u32Src, 1);
9666 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9667 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9668 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9669 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9670
9671 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9672 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9673 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9674 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9675 IEM_MC_FETCH_EFLAGS(EFlags);
9676 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9677 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9678
9679 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
9680 IEM_MC_COMMIT_EFLAGS(EFlags);
9681 IEM_MC_ADVANCE_RIP_AND_FINISH();
9682 IEM_MC_END();
9683 break;
9684
9685 case IEMMODE_64BIT:
9686 IEM_MC_BEGIN(4, 3, IEM_MC_F_64BIT, 0);
9687 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9688 IEM_MC_ARG(uint64_t, u64Src, 1);
9689 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9690 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9691 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9692 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9693
9694 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9695 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9696 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9697 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9698 IEM_MC_FETCH_EFLAGS(EFlags);
9699 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9700 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9701
9702 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
9703 IEM_MC_COMMIT_EFLAGS(EFlags);
9704 IEM_MC_ADVANCE_RIP_AND_FINISH();
9705 IEM_MC_END();
9706 break;
9707
9708 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9709 }
9710 }
9711}
9712
9713
9714
9715/** Opcode 0x0f 0xa4. */
9716FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
9717{
9718 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
9719 IEMOP_HLP_MIN_386();
9720 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9721}
9722
9723
9724/** Opcode 0x0f 0xa5. */
9725FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
9726{
9727 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
9728 IEMOP_HLP_MIN_386();
9729 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9730}
9731
9732
9733/** Opcode 0x0f 0xa8. */
9734FNIEMOP_DEF(iemOp_push_gs)
9735{
9736 IEMOP_MNEMONIC(push_gs, "push gs");
9737 IEMOP_HLP_MIN_386();
9738 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9739 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
9740}
9741
9742
9743/** Opcode 0x0f 0xa9. */
9744FNIEMOP_DEF(iemOp_pop_gs)
9745{
9746 IEMOP_MNEMONIC(pop_gs, "pop gs");
9747 IEMOP_HLP_MIN_386();
9748 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9749 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9750 IEM_MC_DEFER_TO_CIMPL_2_RET(0,
9751 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
9752 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
9753 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
9754 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
9755 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
9756 iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
9757}
9758
9759
9760/** Opcode 0x0f 0xaa. */
9761FNIEMOP_DEF(iemOp_rsm)
9762{
9763 IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
9764 IEMOP_HLP_MIN_386(); /* 386SL and later. */
9765 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9766 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
9767 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB, 0,
9768 iemCImpl_rsm);
9769}
9770
9771
9772
9773/** Opcode 0x0f 0xab. */
9774FNIEMOP_DEF(iemOp_bts_Ev_Gv)
9775{
9776 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
9777 IEMOP_HLP_MIN_386();
9778 IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_bts_u16, iemAImpl_bts_u32, iemAImpl_bts_u64);
9779 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
9780}
9781
9782
9783/** Opcode 0x0f 0xac. */
9784FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
9785{
9786 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
9787 IEMOP_HLP_MIN_386();
9788 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9789}
9790
9791
9792/** Opcode 0x0f 0xad. */
9793FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
9794{
9795 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
9796 IEMOP_HLP_MIN_386();
9797 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9798}
9799
9800
9801/** Opcode 0x0f 0xae mem/0. */
9802FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
9803{
9804 IEMOP_MNEMONIC(fxsave, "fxsave m512");
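    /* FXSAVE writes the x87/MMX and SSE state to a 512 byte, 16 byte aligned
       memory area; the alignment check and layout details are left to the
       iemCImpl_fxsave worker. */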
9805 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9806 IEMOP_RAISE_INVALID_OPCODE_RET();
9807
9808 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_PENTIUM_II, 0);
9809 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9810 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9811 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9812 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9813 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9814 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/pVCpu->iem.s.enmEffOpSize, 2);
9815 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
9816 IEM_MC_END();
9817}
9818
9819
9820/** Opcode 0x0f 0xae mem/1. */
9821FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
9822{
9823 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
9824 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9825 IEMOP_RAISE_INVALID_OPCODE_RET();
9826
9827 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_PENTIUM_II, 0);
9828 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9829 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9830 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9831 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9832 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9833 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/pVCpu->iem.s.enmEffOpSize, 2);
9834 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9835 IEM_MC_END();
9836}
9837
9838
9839/**
9840 * @opmaps grp15
9841 * @opcode !11/2
9842 * @oppfx none
9843 * @opcpuid sse
9844 * @opgroup og_sse_mxcsrsm
9845 * @opxcpttype 5
9846 * @optest op1=0 -> mxcsr=0
9847 * @optest op1=0x2083 -> mxcsr=0x2083
9848 * @optest op1=0xfffffffe -> value.xcpt=0xd
9849 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
9850 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
9851 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
9852 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
9853 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
9854 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9855 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9856 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9857 */
9858FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
9859{
9860 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9861 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9862 IEMOP_RAISE_INVALID_OPCODE_RET();
9863
9864 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_PENTIUM_II, 0);
9865 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9866 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9867 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9868 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
9869 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9870 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
9871 IEM_MC_END();
9872}
9873
9874
9875/**
9876 * @opmaps grp15
9877 * @opcode !11/3
9878 * @oppfx none
9879 * @opcpuid sse
9880 * @opgroup og_sse_mxcsrsm
9881 * @opxcpttype 5
9882 * @optest mxcsr=0 -> op1=0
9883 * @optest mxcsr=0x2083 -> op1=0x2083
9884 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
9885 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
9886 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
9887 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
9888 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
9889 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9890 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9891 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9892 */
9893FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
9894{
9895 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9896 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9897 IEMOP_RAISE_INVALID_OPCODE_RET();
9898
9899 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_PENTIUM_II, 0);
9900 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9901 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9902 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9903 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9904 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9905 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
9906 IEM_MC_END();
9907}
9908
9909
9910/**
9911 * @opmaps grp15
9912 * @opcode !11/4
9913 * @oppfx none
9914 * @opcpuid xsave
9915 * @opgroup og_system
9916 * @opxcpttype none
9917 */
9918FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
9919{
9920 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
9921 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9922 IEMOP_RAISE_INVALID_OPCODE_RET();
9923
9924 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_CORE, 0);
9925 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9926 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9927 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9928 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9929 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9930 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
9931 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
9932 IEM_MC_END();
9933}
9934
9935
9936/**
9937 * @opmaps grp15
9938 * @opcode !11/5
9939 * @oppfx none
9940 * @opcpuid xsave
9941 * @opgroup og_system
9942 * @opxcpttype none
9943 */
9944FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
9945{
9946 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
9947 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9948 IEMOP_RAISE_INVALID_OPCODE_RET();
9949
9950 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_CORE, 0);
9951 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9952 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9953 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9954 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9955 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9956 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
9957 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9958 IEM_MC_END();
9959}
9960
9961/** Opcode 0x0f 0xae mem/6. */
9962FNIEMOP_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
9963
9964/**
9965 * @opmaps grp15
9966 * @opcode !11/7
9967 * @oppfx none
9968 * @opcpuid clfsh
9969 * @opgroup og_cachectl
9970 * @optest op1=1 ->
9971 */
9972FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
9973{
9974 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9975 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
9976 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9977
9978 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
9979 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9980 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9981 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9982 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9983 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9984 IEM_MC_END();
9985}
9986
9987/**
9988 * @opmaps grp15
9989 * @opcode !11/7
9990 * @oppfx 0x66
9991 * @opcpuid clflushopt
9992 * @opgroup og_cachectl
9993 * @optest op1=1 ->
9994 */
9995FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
9996{
9997 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9998 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
9999 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
10000
10001 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
10002 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
10003 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
10004 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10005 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
10006 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
10007 IEM_MC_END();
10008}
10009
10010
10011/** Opcode 0x0f 0xae 11b/5. */
10012FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
10013{
10014 RT_NOREF_PV(bRm);
10015 IEMOP_MNEMONIC(lfence, "lfence");
10016 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
10017 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
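    /* An ARM64 host always has a suitable native barrier.  On x86 hosts the
       LFENCE instruction itself requires SSE2, so pre-SSE2 hosts fall back on
       iemAImpl_alt_mem_fence, which presumably achieves the ordering with a
       locked memory operation instead. */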
10018#ifdef RT_ARCH_ARM64
10019 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
10020#else
10021 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
10022 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
10023 else
10024 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
10025#endif
10026 IEM_MC_ADVANCE_RIP_AND_FINISH();
10027 IEM_MC_END();
10028}
10029
10030
10031/** Opcode 0x0f 0xae 11b/6. */
10032FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
10033{
10034 RT_NOREF_PV(bRm);
10035 IEMOP_MNEMONIC(mfence, "mfence");
10036 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
10037 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
10038#ifdef RT_ARCH_ARM64
10039 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
10040#else
10041 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
10042 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
10043 else
10044 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
10045#endif
10046 IEM_MC_ADVANCE_RIP_AND_FINISH();
10047 IEM_MC_END();
10048}
10049
10050
10051/** Opcode 0x0f 0xae 11b/7. */
10052FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
10053{
10054 RT_NOREF_PV(bRm);
10055 IEMOP_MNEMONIC(sfence, "sfence");
10056 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
10057 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
10058#ifdef RT_ARCH_ARM64
10059 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
10060#else
10061 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
10062 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
10063 else
10064 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
10065#endif
10066 IEM_MC_ADVANCE_RIP_AND_FINISH();
10067 IEM_MC_END();
10068}
10069
10070
10071/** Opcode 0xf3 0x0f 0xae 11b/0. */
10072FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
10073{
10074 IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
10075 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
10076 {
10077 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
10078 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10079 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10080 IEM_MC_LOCAL(uint64_t, u64Dst);
10081 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
10082 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
10083 IEM_MC_ADVANCE_RIP_AND_FINISH();
10084 IEM_MC_END();
10085 }
10086 else
10087 {
10088 IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
10089 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10090 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10091 IEM_MC_LOCAL(uint32_t, u32Dst);
10092 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
10093 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
10094 IEM_MC_ADVANCE_RIP_AND_FINISH();
10095 IEM_MC_END();
10096 }
10097}
10098
10099
10100/** Opcode 0xf3 0x0f 0xae 11b/1. */
10101FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
10102{
10103 IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
10104 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
10105 {
10106 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
10107 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10108 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10109 IEM_MC_LOCAL(uint64_t, u64Dst);
10110 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
10111 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
10112 IEM_MC_ADVANCE_RIP_AND_FINISH();
10113 IEM_MC_END();
10114 }
10115 else
10116 {
10117 IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
10118 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10119 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10120 IEM_MC_LOCAL(uint32_t, u32Dst);
10121 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
10122 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
10123 IEM_MC_ADVANCE_RIP_AND_FINISH();
10124 IEM_MC_END();
10125 }
10126}
10127
10128
10129/** Opcode 0xf3 0x0f 0xae 11b/2. */
10130FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
10131{
10132 IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
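    /* With a 64-bit operand the new base must be canonical or #GP(0) is
       raised; the 32-bit form zero-extends the value and thus cannot fault
       that way. */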
10133 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
10134 {
10135 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
10136 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10137 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10138 IEM_MC_LOCAL(uint64_t, u64Dst);
10139 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10140 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
10141 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
10142 IEM_MC_ADVANCE_RIP_AND_FINISH();
10143 IEM_MC_END();
10144 }
10145 else
10146 {
10147 IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
10148 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10149 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10150 IEM_MC_LOCAL(uint32_t, u32Dst);
10151 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10152 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
10153 IEM_MC_ADVANCE_RIP_AND_FINISH();
10154 IEM_MC_END();
10155 }
10156}
10157
10158
10159/** Opcode 0xf3 0x0f 0xae 11b/3. */
10160FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
10161{
10162 IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
10163 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
10164 {
10165 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
10166 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10167 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10168 IEM_MC_LOCAL(uint64_t, u64Dst);
10169 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10170 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
10171 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
10172 IEM_MC_ADVANCE_RIP_AND_FINISH();
10173 IEM_MC_END();
10174 }
10175 else
10176 {
10177 IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
10178 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10179 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10180 IEM_MC_LOCAL(uint32_t, u32Dst);
10181 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10182 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
10183 IEM_MC_ADVANCE_RIP_AND_FINISH();
10184 IEM_MC_END();
10185 }
10186}
10187
10188
10189/**
10190 * Group 15 jump table for register variant.
10191 */
10192IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
10193{ /* pfx: none, 066h, 0f3h, 0f2h */
10194 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
10195 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
10196 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
10197 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
10198 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
10199 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10200 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10201 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10202};
10203AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
10204
10205
10206/**
10207 * Group 15 jump table for memory variant.
10208 */
10209IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
10210{ /* pfx: none, 066h, 0f3h, 0f2h */
10211 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10212 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10213 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10214 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10215 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10216 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10217 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10218 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10219};
10220AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
10221
10222
10223/** Opcode 0x0f 0xae. */
10224FNIEMOP_DEF(iemOp_Grp15)
10225{
10226 IEMOP_HLP_MIN_586(); /* Not entirely accurate, nor strictly needed, but useful when debugging 286 code. */
10227 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
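    /* Both jump tables are indexed by (reg field * 4) + operand prefix, i.e.
       eight /r rows of four columns: none, 0x66, 0xf3 and 0xf2. */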
10228 if (IEM_IS_MODRM_REG_MODE(bRm))
10229 /* register, register */
10230 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10231 + pVCpu->iem.s.idxPrefix], bRm);
10232 /* memory, register */
10233 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10234 + pVCpu->iem.s.idxPrefix], bRm);
10235}
10236
10237
10238/** Opcode 0x0f 0xaf. */
10239FNIEMOP_DEF(iemOp_imul_Gv_Ev)
10240{
10241 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
10242 IEMOP_HLP_MIN_386();
10243 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10244 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_eflags);
10245 IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1, IEM_MC_F_MIN_386);
10246}
10247
10248
10249/** Opcode 0x0f 0xb0. */
10250FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
10251{
10252 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
10253 IEMOP_HLP_MIN_486();
10254 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
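    /* CMPXCHG compares AL with the destination: if they are equal, ZF is set
       and the source is stored in the destination; otherwise ZF is cleared
       and the destination value is loaded into AL. */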
10255
10256 if (IEM_IS_MODRM_REG_MODE(bRm))
10257 {
10258 IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_486, 0);
10259 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10260 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10261 IEM_MC_ARG(uint8_t *, pu8Al, 1);
10262 IEM_MC_ARG(uint8_t, u8Src, 2);
10263 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10264
10265 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10266 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10267 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
10268 IEM_MC_REF_EFLAGS(pEFlags);
10269 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
10270
10271 IEM_MC_ADVANCE_RIP_AND_FINISH();
10272 IEM_MC_END();
10273 }
10274 else
10275 {
10276#define IEMOP_BODY_CMPXCHG_BYTE(a_fnWorker, a_Type) \
10277 IEM_MC_BEGIN(4, 4, IEM_MC_F_MIN_486, 0); \
10278 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10279 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10280 IEMOP_HLP_DONE_DECODING(); \
10281 \
10282 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10283 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
10284 IEM_MC_MEM_MAP_U8_##a_Type(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10285 \
10286 IEM_MC_ARG(uint8_t, u8Src, 2); \
10287 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
10288 \
10289 IEM_MC_LOCAL(uint8_t, u8Al); \
10290 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX); \
10291 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Al, u8Al, 1); \
10292 \
10293 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
10294 IEM_MC_FETCH_EFLAGS(EFlags); \
10295 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker, pu8Dst, pu8Al, u8Src, pEFlags); \
10296 \
10297 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
10298 IEM_MC_COMMIT_EFLAGS(EFlags); \
10299 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al); \
10300 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10301 IEM_MC_END()
10302
10303 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
10304 {
10305 IEMOP_BODY_CMPXCHG_BYTE(iemAImpl_cmpxchg_u8,RW);
10306 }
10307 else
10308 {
10309 IEMOP_BODY_CMPXCHG_BYTE(iemAImpl_cmpxchg_u8_locked,ATOMIC);
10310 }
10311 }
10312}
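
/* Reference sketch of what the cmpxchg workers above implement (standard
   x86 semantics, shown for illustration only):
        if (AL == *pu8Dst) { ZF = 1; *pu8Dst = u8Src; }
        else               { ZF = 0; AL = *pu8Dst;    }
   with the remaining arithmetic flags set as for 'cmp al, [dst]'. */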
10313
10314/** Opcode 0x0f 0xb1. */
10315FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
10316{
10317 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
10318 IEMOP_HLP_MIN_486();
10319 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10320
10321 if (IEM_IS_MODRM_REG_MODE(bRm))
10322 {
10323 switch (pVCpu->iem.s.enmEffOpSize)
10324 {
10325 case IEMMODE_16BIT:
10326 IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_486, 0);
10327 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10328 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10329 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
10330 IEM_MC_ARG(uint16_t, u16Src, 2);
10331 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10332
10333 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10334 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10335 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
10336 IEM_MC_REF_EFLAGS(pEFlags);
10337 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
10338
10339 IEM_MC_ADVANCE_RIP_AND_FINISH();
10340 IEM_MC_END();
10341 break;
10342
10343 case IEMMODE_32BIT:
10344 IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_486, 0);
10345 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10346 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10347 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
10348 IEM_MC_ARG(uint32_t, u32Src, 2);
10349 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10350
10351 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10352 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10353 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
10354 IEM_MC_REF_EFLAGS(pEFlags);
10355 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
10356
10357 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
10358 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
10359 } IEM_MC_ELSE() {
10360 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX);
10361 } IEM_MC_ENDIF();
10362
10363 IEM_MC_ADVANCE_RIP_AND_FINISH();
10364 IEM_MC_END();
10365 break;
10366
10367 case IEMMODE_64BIT:
10368 IEM_MC_BEGIN(4, 0, IEM_MC_F_64BIT, 0);
10369 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10370 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10371 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
10372 IEM_MC_ARG(uint64_t, u64Src, 2);
10373 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10374
10375 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10376 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10377 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
10378 IEM_MC_REF_EFLAGS(pEFlags);
10379 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
10380
10381 IEM_MC_ADVANCE_RIP_AND_FINISH();
10382 IEM_MC_END();
10383 break;
10384
10385 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10386 }
10387 }
10388 else
10389 {
10390#define IEMOP_BODY_CMPXCHG_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64,a_Type) \
10391 do { \
10392 switch (pVCpu->iem.s.enmEffOpSize) \
10393 { \
10394 case IEMMODE_16BIT: \
10395 IEM_MC_BEGIN(4, 4, IEM_MC_F_MIN_486, 0); \
10396 \
10397 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10398 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10399 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10400 IEMOP_HLP_DONE_DECODING(); \
10401 \
10402 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
10403 IEM_MC_MEM_MAP_U16_##a_Type(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10404 \
10405 IEM_MC_ARG(uint16_t, u16Src, 2); \
10406 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
10407 \
10408 IEM_MC_LOCAL(uint16_t, u16Ax); \
10409 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX); \
10410 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Ax, u16Ax, 1); \
10411 \
10412 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
10413 IEM_MC_FETCH_EFLAGS(EFlags); \
10414 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker16, pu16Dst, pu16Ax, u16Src, pEFlags); \
10415 \
10416 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
10417 IEM_MC_COMMIT_EFLAGS(EFlags); \
10418 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax); \
10419 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10420 IEM_MC_END(); \
10421 break; \
10422 \
10423 case IEMMODE_32BIT: \
10424 IEM_MC_BEGIN(4, 4, IEM_MC_F_MIN_486, 0); \
10425 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10426 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10427 IEMOP_HLP_DONE_DECODING(); \
10428 \
10429 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10430 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
10431 IEM_MC_MEM_MAP_U32_##a_Type(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10432 \
10433 IEM_MC_ARG(uint32_t, u32Src, 2); \
10434 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
10435 \
10436 IEM_MC_LOCAL(uint32_t, u32Eax); \
10437 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX); \
10438 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Eax, u32Eax, 1); \
10439 \
10440 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
10441 IEM_MC_FETCH_EFLAGS(EFlags); \
10442 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker32, pu32Dst, pu32Eax, u32Src, pEFlags); \
10443 \
10444 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
10445 IEM_MC_COMMIT_EFLAGS(EFlags); \
10446 \
10447 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
10448 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax); \
10449 } IEM_MC_ENDIF(); \
10450 \
10451 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10452 IEM_MC_END(); \
10453 break; \
10454 \
10455 case IEMMODE_64BIT: \
10456 IEM_MC_BEGIN(4, 4, IEM_MC_F_64BIT, 0); \
10457 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10458 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
10459 IEMOP_HLP_DONE_DECODING(); \
10460 \
10461 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10462 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
10463 IEM_MC_MEM_MAP_U64_##a_Type(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10464 \
10465 IEM_MC_ARG(uint64_t, u64Src, 2); \
10466 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
10467 \
10468 IEM_MC_LOCAL(uint64_t, u64Rax); \
10469 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX); \
10470 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Rax, u64Rax, 1); \
10471 \
10472 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
10473 IEM_MC_FETCH_EFLAGS(EFlags); \
10474 \
10475 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker64, pu64Dst, pu64Rax, u64Src, pEFlags); \
10476 \
10477 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
10478 IEM_MC_COMMIT_EFLAGS(EFlags); \
10479 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax); \
10480 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10481 IEM_MC_END(); \
10482 break; \
10483 \
10484 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10485 } \
10486 } while (0)
10487
10488 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
10489 {
10490 IEMOP_BODY_CMPXCHG_EV_GV(iemAImpl_cmpxchg_u16, iemAImpl_cmpxchg_u32, iemAImpl_cmpxchg_u64,RW);
10491 }
10492 else
10493 {
10494 IEMOP_BODY_CMPXCHG_EV_GV(iemAImpl_cmpxchg_u16_locked, iemAImpl_cmpxchg_u32_locked, iemAImpl_cmpxchg_u64_locked,ATOMIC);
10495 }
10496 }
10497}
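
/* Note for illustration: with a 32-bit operand in 64-bit mode a written
   GPR gets bits 63:32 zeroed, so the register variant above clears the
   high half of the destination on success (ZF=1) and of RAX on failure,
   while the memory variant only stores EAX back when ZF is clear.  E.g.,
   assuming RAX=1, RBX=0xdeadbeef00000001 and ECX=2, 'cmpxchg ebx, ecx'
   finds EAX == EBX, sets ZF and leaves RBX = 0x0000000000000002. */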
10498
10499
10500/** Opcode 0x0f 0xb2. */
10501FNIEMOP_DEF(iemOp_lss_Gv_Mp)
10502{
10503 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
10504 IEMOP_HLP_MIN_386();
10505 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10506 if (IEM_IS_MODRM_REG_MODE(bRm))
10507 IEMOP_RAISE_INVALID_OPCODE_RET();
10508 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
10509}
10510
10511
10512/** Opcode 0x0f 0xb3. */
10513FNIEMOP_DEF(iemOp_btr_Ev_Gv)
10514{
10515 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
10516 IEMOP_HLP_MIN_386();
10517 IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_btr_u16, iemAImpl_btr_u32, iemAImpl_btr_u64);
10518 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_btr_u16_locked, iemAImpl_btr_u32_locked, iemAImpl_btr_u64_locked);
10519}
10520
10521
10522/** Opcode 0x0f 0xb4. */
10523FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
10524{
10525 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
10526 IEMOP_HLP_MIN_386();
10527 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10528 if (IEM_IS_MODRM_REG_MODE(bRm))
10529 IEMOP_RAISE_INVALID_OPCODE_RET();
10530 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
10531}
10532
10533
10534/** Opcode 0x0f 0xb5. */
10535FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
10536{
10537 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
10538 IEMOP_HLP_MIN_386();
10539 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10540 if (IEM_IS_MODRM_REG_MODE(bRm))
10541 IEMOP_RAISE_INVALID_OPCODE_RET();
10542 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
10543}
10544
10545
10546/** Opcode 0x0f 0xb6. */
10547FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
10548{
10549 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
10550 IEMOP_HLP_MIN_386();
10551
10552 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10553
10554 /*
10555 * If rm is denoting a register, no more instruction bytes.
10556 */
10557 if (IEM_IS_MODRM_REG_MODE(bRm))
10558 {
10559 switch (pVCpu->iem.s.enmEffOpSize)
10560 {
10561 case IEMMODE_16BIT:
10562 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
10563 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10564 IEM_MC_LOCAL(uint16_t, u16Value);
10565 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10566 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10567 IEM_MC_ADVANCE_RIP_AND_FINISH();
10568 IEM_MC_END();
10569 break;
10570
10571 case IEMMODE_32BIT:
10572 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
10573 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10574 IEM_MC_LOCAL(uint32_t, u32Value);
10575 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10576 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10577 IEM_MC_ADVANCE_RIP_AND_FINISH();
10578 IEM_MC_END();
10579 break;
10580
10581 case IEMMODE_64BIT:
10582 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
10583 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10584 IEM_MC_LOCAL(uint64_t, u64Value);
10585 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10586 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10587 IEM_MC_ADVANCE_RIP_AND_FINISH();
10588 IEM_MC_END();
10589 break;
10590
10591 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10592 }
10593 }
10594 else
10595 {
10596 /*
10597 * We're loading a register from memory.
10598 */
10599 switch (pVCpu->iem.s.enmEffOpSize)
10600 {
10601 case IEMMODE_16BIT:
10602 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
10603 IEM_MC_LOCAL(uint16_t, u16Value);
10604 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10605 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10606 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10607 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10608 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10609 IEM_MC_ADVANCE_RIP_AND_FINISH();
10610 IEM_MC_END();
10611 break;
10612
10613 case IEMMODE_32BIT:
10614 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
10615 IEM_MC_LOCAL(uint32_t, u32Value);
10616 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10617 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10618 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10619 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10620 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10621 IEM_MC_ADVANCE_RIP_AND_FINISH();
10622 IEM_MC_END();
10623 break;
10624
10625 case IEMMODE_64BIT:
10626 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
10627 IEM_MC_LOCAL(uint64_t, u64Value);
10628 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10629 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10630 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10631 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10632 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10633 IEM_MC_ADVANCE_RIP_AND_FINISH();
10634 IEM_MC_END();
10635 break;
10636
10637 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10638 }
10639 }
10640}
10641
10642
10643/** Opcode 0x0f 0xb7. */
10644FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
10645{
10646 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
10647 IEMOP_HLP_MIN_386();
10648
10649 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10650
10651 /** @todo Not entirely sure how the operand size prefix is handled here,
10652 * assuming that it will be ignored. Would be nice to have a few
10653 * tests for this. */
10654
10655 /** @todo There should be no difference in the behaviour whether REX.W is
10656 * present or not... */
10657
10658 /*
10659 * If rm is denoting a register, no more instruction bytes.
10660 */
10661 if (IEM_IS_MODRM_REG_MODE(bRm))
10662 {
10663 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10664 {
10665 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
10666 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10667 IEM_MC_LOCAL(uint32_t, u32Value);
10668 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10669 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10670 IEM_MC_ADVANCE_RIP_AND_FINISH();
10671 IEM_MC_END();
10672 }
10673 else
10674 {
10675 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
10676 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10677 IEM_MC_LOCAL(uint64_t, u64Value);
10678 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10679 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10680 IEM_MC_ADVANCE_RIP_AND_FINISH();
10681 IEM_MC_END();
10682 }
10683 }
10684 else
10685 {
10686 /*
10687 * We're loading a register from memory.
10688 */
10689 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10690 {
10691 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
10692 IEM_MC_LOCAL(uint32_t, u32Value);
10693 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10694 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10695 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10696 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10697 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10698 IEM_MC_ADVANCE_RIP_AND_FINISH();
10699 IEM_MC_END();
10700 }
10701 else
10702 {
10703 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
10704 IEM_MC_LOCAL(uint64_t, u64Value);
10705 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10706 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10707 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10708 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10709 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10710 IEM_MC_ADVANCE_RIP_AND_FINISH();
10711 IEM_MC_END();
10712 }
10713 }
10714}
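
/* Illustrative note on the @todo above: any non-64-bit effective operand
   size takes the 16 -> 32 zero-extension path here, i.e. a 066h prefix is
   assumed to be ignored and the instruction behaves like the 32-bit form,
   so 'movzx eax, bx' with BX=0x8000 gives EAX=0x00008000. */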
10715
10716
10717/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
10718FNIEMOP_UD_STUB(iemOp_jmpe);
10719
10720
10721/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
10722FNIEMOP_DEF(iemOp_popcnt_Gv_Ev)
10723{
10724 IEMOP_MNEMONIC2(RM, POPCNT, popcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
10725 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fPopCnt)
10726 return iemOp_InvalidNeedRM(pVCpu);
10727#ifndef TST_IEM_CHECK_MC
10728# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
10729 static const IEMOPBINSIZES s_Native =
10730 { NULL, NULL, iemAImpl_popcnt_u16, NULL, iemAImpl_popcnt_u32, NULL, iemAImpl_popcnt_u64, NULL };
10731# endif
10732 static const IEMOPBINSIZES s_Fallback =
10733 { NULL, NULL, iemAImpl_popcnt_u16_fallback, NULL, iemAImpl_popcnt_u32_fallback, NULL, iemAImpl_popcnt_u64_fallback, NULL };
10734#endif
10735 const IEMOPBINSIZES * const pImpl = IEM_SELECT_HOST_OR_FALLBACK(fPopCnt, &s_Native, &s_Fallback);
10736 IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1, IEM_MC_F_NOT_286_OR_OLDER);
10737}
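
/* Illustrative note: IEM_SELECT_HOST_OR_FALLBACK picks the assembly
   workers (s_Native) when the host CPU has POPCNT and the C workers
   (s_Fallback) otherwise; the guest-visible result is identical.  POPCNT
   clears OF/SF/AF/CF/PF and sets ZF only for a zero source, e.g.
   'popcnt eax, ebx' with EBX=0xf0 gives EAX=4 and ZF=0. */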
10738
10739
10740/**
10741 * @opcode 0xb9
10742 * @opinvalid intel-modrm
10743 * @optest ->
10744 */
10745FNIEMOP_DEF(iemOp_Grp10)
10746{
10747 /*
10748 * AMD does not decode beyond the 0xb9 opcode byte, whereas Intel decodes the
10749 * modr/m byte too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
10750 */
10751 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
10752 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
10753 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
10754}
10755
10756
10757/**
10758 * Body for group 8 bit instruction.
10759 */
10760#define IEMOP_BODY_BIT_Ev_Ib_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
10761 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
10762 \
10763 if (IEM_IS_MODRM_REG_MODE(bRm)) \
10764 { \
10765 /* register destination. */ \
10766 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10767 \
10768 switch (pVCpu->iem.s.enmEffOpSize) \
10769 { \
10770 case IEMMODE_16BIT: \
10771 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
10772 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10773 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
10774 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); \
10775 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10776 \
10777 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10778 IEM_MC_REF_EFLAGS(pEFlags); \
10779 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
10780 \
10781 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10782 IEM_MC_END(); \
10783 break; \
10784 \
10785 case IEMMODE_32BIT: \
10786 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
10787 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10788 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
10789 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); \
10790 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10791 \
10792 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10793 IEM_MC_REF_EFLAGS(pEFlags); \
10794 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
10795 \
10796 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
10797 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10798 IEM_MC_END(); \
10799 break; \
10800 \
10801 case IEMMODE_64BIT: \
10802 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
10803 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10804 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
10805 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); \
10806 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10807 \
10808 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10809 IEM_MC_REF_EFLAGS(pEFlags); \
10810 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
10811 \
10812 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10813 IEM_MC_END(); \
10814 break; \
10815 \
10816 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10817 } \
10818 } \
10819 else \
10820 { \
10821 /* memory destination. */ \
10822 /** @todo test negative bit offsets! */ \
10823 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
10824 { \
10825 switch (pVCpu->iem.s.enmEffOpSize) \
10826 { \
10827 case IEMMODE_16BIT: \
10828 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
10829 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10830 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10831 \
10832 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10833 IEMOP_HLP_DONE_DECODING(); \
10834 \
10835 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10836 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
10837 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10838 \
10839 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); \
10840 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10841 IEM_MC_FETCH_EFLAGS(EFlags); \
10842 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
10843 \
10844 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
10845 IEM_MC_COMMIT_EFLAGS(EFlags); \
10846 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10847 IEM_MC_END(); \
10848 break; \
10849 \
10850 case IEMMODE_32BIT: \
10851 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
10852 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10853 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10854 \
10855 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10856 IEMOP_HLP_DONE_DECODING(); \
10857 \
10858 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10859 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
10860 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10861 \
10862 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); \
10863 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10864 IEM_MC_FETCH_EFLAGS(EFlags); \
10865 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
10866 \
10867 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
10868 IEM_MC_COMMIT_EFLAGS(EFlags); \
10869 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10870 IEM_MC_END(); \
10871 break; \
10872 \
10873 case IEMMODE_64BIT: \
10874 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
10875 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10876 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10877 \
10878 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10879 IEMOP_HLP_DONE_DECODING(); \
10880 \
10881 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10882 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
10883 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10884 \
10885 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); \
10886 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10887 IEM_MC_FETCH_EFLAGS(EFlags); \
10888 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
10889 \
10890 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
10891 IEM_MC_COMMIT_EFLAGS(EFlags); \
10892 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10893 IEM_MC_END(); \
10894 break; \
10895 \
10896 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10897 } \
10898 } \
10899 else \
10900 { \
10901 (void)0
10902/* Separate macro to work around parsing issue in IEMAllInstPython.py */
10903#define IEMOP_BODY_BIT_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
10904 switch (pVCpu->iem.s.enmEffOpSize) \
10905 { \
10906 case IEMMODE_16BIT: \
10907 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
10908 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10909 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10910 \
10911 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10912 IEMOP_HLP_DONE_DECODING(); \
10913 \
10914 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
10915 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10916 IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10917 \
10918 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); \
10919 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10920 IEM_MC_FETCH_EFLAGS(EFlags); \
10921 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
10922 \
10923 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
10924 IEM_MC_COMMIT_EFLAGS(EFlags); \
10925 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10926 IEM_MC_END(); \
10927 break; \
10928 \
10929 case IEMMODE_32BIT: \
10930 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
10931 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10932 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10933 \
10934 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10935 IEMOP_HLP_DONE_DECODING(); \
10936 \
10937 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10938 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
10939 IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10940 \
10941 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); \
10942 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10943 IEM_MC_FETCH_EFLAGS(EFlags); \
10944 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
10945 \
10946 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
10947 IEM_MC_COMMIT_EFLAGS(EFlags); \
10948 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10949 IEM_MC_END(); \
10950 break; \
10951 \
10952 case IEMMODE_64BIT: \
10953 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
10954 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10955 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10956 \
10957 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10958 IEMOP_HLP_DONE_DECODING(); \
10959 \
10960 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10961 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
10962 IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10963 \
10964 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); \
10965 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10966 IEM_MC_FETCH_EFLAGS(EFlags); \
10967 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
10968 \
10969 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
10970 IEM_MC_COMMIT_EFLAGS(EFlags); \
10971 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10972 IEM_MC_END(); \
10973 break; \
10974 \
10975 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10976 } \
10977 } \
10978 } \
10979 (void)0
10980
10981/* Read-only version (bt) */
10982#define IEMOP_BODY_BIT_Ev_Ib_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
10983 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
10984 \
10985 if (IEM_IS_MODRM_REG_MODE(bRm)) \
10986 { \
10987 /* register destination. */ \
10988 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10989 \
10990 switch (pVCpu->iem.s.enmEffOpSize) \
10991 { \
10992 case IEMMODE_16BIT: \
10993 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
10994 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10995 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
10996 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); \
10997 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10998 \
10999 IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
11000 IEM_MC_REF_EFLAGS(pEFlags); \
11001 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
11002 \
11003 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11004 IEM_MC_END(); \
11005 break; \
11006 \
11007 case IEMMODE_32BIT: \
11008 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
11009 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11010 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
11011 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); \
11012 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
11013 \
11014 IEM_MC_REF_GREG_U32_CONST(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
11015 IEM_MC_REF_EFLAGS(pEFlags); \
11016 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
11017 \
11018 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11019 IEM_MC_END(); \
11020 break; \
11021 \
11022 case IEMMODE_64BIT: \
11023 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
11024 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11025 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
11026 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); \
11027 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
11028 \
11029 IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
11030 IEM_MC_REF_EFLAGS(pEFlags); \
11031 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
11032 \
11033 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11034 IEM_MC_END(); \
11035 break; \
11036 \
11037 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11038 } \
11039 } \
11040 else \
11041 { \
11042 /* memory destination. */ \
11043 /** @todo test negative bit offsets! */ \
11044 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
11045 { \
11046 switch (pVCpu->iem.s.enmEffOpSize) \
11047 { \
11048 case IEMMODE_16BIT: \
11049 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
11050 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11051 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
11052 \
11053 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
11054 IEMOP_HLP_DONE_DECODING(); \
11055 \
11056 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11057 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
11058 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11059 \
11060 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); \
11061 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11062 IEM_MC_FETCH_EFLAGS(EFlags); \
11063 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
11064 \
11065 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
11066 IEM_MC_COMMIT_EFLAGS(EFlags); \
11067 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11068 IEM_MC_END(); \
11069 break; \
11070 \
11071 case IEMMODE_32BIT: \
11072 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
11073 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11074 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
11075 \
11076 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
11077 IEMOP_HLP_DONE_DECODING(); \
11078 \
11079 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11080 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
11081 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11082 \
11083 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); \
11084 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11085 IEM_MC_FETCH_EFLAGS(EFlags); \
11086 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
11087 \
11088 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
11089 IEM_MC_COMMIT_EFLAGS(EFlags); \
11090 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11091 IEM_MC_END(); \
11092 break; \
11093 \
11094 case IEMMODE_64BIT: \
11095 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
11096 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11097 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
11098 \
11099 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
11100 IEMOP_HLP_DONE_DECODING(); \
11101 \
11102 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11103 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
11104 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11105 \
11106 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); \
11107 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11108 IEM_MC_FETCH_EFLAGS(EFlags); \
11109 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
11110 \
11111 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
11112 IEM_MC_COMMIT_EFLAGS(EFlags); \
11113 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11114 IEM_MC_END(); \
11115 break; \
11116 \
11117 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11118 } \
11119 } \
11120 else \
11121 { \
11122 IEMOP_HLP_DONE_DECODING(); \
11123 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
11124 } \
11125 } \
11126 (void)0
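
/* Worked example (illustration only): the Ib bodies above mask the
   immediate to the operand width (0x0f/0x1f/0x3f), so unlike the Gv
   forms the bit offset can never reach outside the addressed 16/32/64-bit
   unit.  E.g. 'bt word [mem], 17' tests bit 17 & 0x0f = bit 1 of the word
   at [mem]. */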
11127
11128
11129/** Opcode 0x0f 0xba /4. */
11130FNIEMOPRM_DEF(iemOp_Grp8_bt_Ev_Ib)
11131{
11132 IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib");
11133 IEMOP_BODY_BIT_Ev_Ib_RO(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64);
11134}
11135
11136
11137/** Opcode 0x0f 0xba /5. */
11138FNIEMOPRM_DEF(iemOp_Grp8_bts_Ev_Ib)
11139{
11140 IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib");
11141 IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_bts_u16, iemAImpl_bts_u32, iemAImpl_bts_u64);
11142 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
11143}
11144
11145
11146/** Opcode 0x0f 0xba /6. */
11147FNIEMOPRM_DEF(iemOp_Grp8_btr_Ev_Ib)
11148{
11149 IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib");
11150 IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_btr_u16, iemAImpl_btr_u32, iemAImpl_btr_u64);
11151 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_btr_u16_locked, iemAImpl_btr_u32_locked, iemAImpl_btr_u64_locked);
11152}
11153
11154
11155/** Opcode 0x0f 0xba /7. */
11156FNIEMOPRM_DEF(iemOp_Grp8_btc_Ev_Ib)
11157{
11158 IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib");
11159 IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_btc_u16, iemAImpl_btc_u32, iemAImpl_btc_u64);
11160 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_btc_u16_locked, iemAImpl_btc_u32_locked, iemAImpl_btc_u64_locked);
11161}
11162
11163
11164/** Opcode 0x0f 0xba. */
11165FNIEMOP_DEF(iemOp_Grp8)
11166{
11167 IEMOP_HLP_MIN_386();
11168 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11169 switch (IEM_GET_MODRM_REG_8(bRm))
11170 {
11171 case 4: return FNIEMOP_CALL_1(iemOp_Grp8_bt_Ev_Ib, bRm);
11172 case 5: return FNIEMOP_CALL_1(iemOp_Grp8_bts_Ev_Ib, bRm);
11173 case 6: return FNIEMOP_CALL_1(iemOp_Grp8_btr_Ev_Ib, bRm);
11174 case 7: return FNIEMOP_CALL_1(iemOp_Grp8_btc_Ev_Ib, bRm);
11175
11176 case 0: case 1: case 2: case 3:
11177 /* Both AMD and Intel want full modr/m decoding and imm8. */
11178 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
11179
11180 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11181 }
11182}
11183
11184
11185/** Opcode 0x0f 0xbb. */
11186FNIEMOP_DEF(iemOp_btc_Ev_Gv)
11187{
11188 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
11189 IEMOP_HLP_MIN_386();
11190 IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_btc_u16, iemAImpl_btc_u32, iemAImpl_btc_u64);
11191 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_btc_u16_locked, iemAImpl_btc_u32_locked, iemAImpl_btc_u64_locked);
11192}
11193
11194
11195/**
11196 * Common worker for BSF and BSR instructions.
11197 *
11198 * These cannot use iemOpHlpBinaryOperator_rv_rm because they don't always write
11199 * the destination register, which means that for 32-bit operations the high
11200 * bits must be left alone.
11201 *
11202 * @param pImpl Pointer to the instruction implementation (assembly).
11203 */
11204FNIEMOP_DEF_1(iemOpHlpBitScanOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
11205{
11206 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11207
11208 /*
11209 * If rm is denoting a register, no more instruction bytes.
11210 */
11211 if (IEM_IS_MODRM_REG_MODE(bRm))
11212 {
11213 switch (pVCpu->iem.s.enmEffOpSize)
11214 {
11215 case IEMMODE_16BIT:
11216 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
11217 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11218 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11219 IEM_MC_ARG(uint16_t, u16Src, 1);
11220 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11221
11222 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11223 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11224 IEM_MC_REF_EFLAGS(pEFlags);
11225 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
11226
11227 IEM_MC_ADVANCE_RIP_AND_FINISH();
11228 IEM_MC_END();
11229 break;
11230
11231 case IEMMODE_32BIT:
11232 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
11233 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11234 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11235 IEM_MC_ARG(uint32_t, u32Src, 1);
11236 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11237
11238 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11239 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11240 IEM_MC_REF_EFLAGS(pEFlags);
11241 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
11242 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
11243 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
11244 } IEM_MC_ENDIF();
11245 IEM_MC_ADVANCE_RIP_AND_FINISH();
11246 IEM_MC_END();
11247 break;
11248
11249 case IEMMODE_64BIT:
11250 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
11251 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11252 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11253 IEM_MC_ARG(uint64_t, u64Src, 1);
11254 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11255
11256 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11257 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11258 IEM_MC_REF_EFLAGS(pEFlags);
11259 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
11260
11261 IEM_MC_ADVANCE_RIP_AND_FINISH();
11262 IEM_MC_END();
11263 break;
11264
11265 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11266 }
11267 }
11268 else
11269 {
11270 /*
11271 * We're accessing memory.
11272 */
11273 switch (pVCpu->iem.s.enmEffOpSize)
11274 {
11275 case IEMMODE_16BIT:
11276 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
11277 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11278 IEM_MC_ARG(uint16_t, u16Src, 1);
11279 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11280 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11281
11282 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11283 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11284 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11285 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11286 IEM_MC_REF_EFLAGS(pEFlags);
11287 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
11288
11289 IEM_MC_ADVANCE_RIP_AND_FINISH();
11290 IEM_MC_END();
11291 break;
11292
11293 case IEMMODE_32BIT:
11294 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
11295 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11296 IEM_MC_ARG(uint32_t, u32Src, 1);
11297 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11298 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11299
11300 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11301 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11302 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11303 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11304 IEM_MC_REF_EFLAGS(pEFlags);
11305 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
11306
11307 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
11308 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
11309 } IEM_MC_ENDIF();
11310 IEM_MC_ADVANCE_RIP_AND_FINISH();
11311 IEM_MC_END();
11312 break;
11313
11314 case IEMMODE_64BIT:
11315 IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
11316 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11317 IEM_MC_ARG(uint64_t, u64Src, 1);
11318 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11319 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11320
11321 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11322 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11323 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11324 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11325 IEM_MC_REF_EFLAGS(pEFlags);
11326 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
11327
11328 IEM_MC_ADVANCE_RIP_AND_FINISH();
11329 IEM_MC_END();
11330 break;
11331
11332 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11333 }
11334 }
11335}
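
/* Illustration of the remark above: with a zero source these workers set
   ZF and leave the destination untouched, including bits 63:32 for 32-bit
   operands, hence the conditional IEM_MC_CLEAR_HIGH_GREG_U64.  For a
   non-zero source, e.g. 'bsf eax, ebx' with EBX=0x8, ZF is cleared and
   EAX becomes 3 (for EBX=0x18, bsf gives 3 and bsr gives 4). */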
11336
11337
11338/** Opcode 0x0f 0xbc. */
11339FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
11340{
11341 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
11342 IEMOP_HLP_MIN_386();
11343 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
11344 return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsf_eflags));
11345}
11346
11347
11348/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
11349FNIEMOP_DEF(iemOp_tzcnt_Gv_Ev)
11350{
11351 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
11352 return FNIEMOP_CALL(iemOp_bsf_Gv_Ev);
11353 IEMOP_MNEMONIC2(RM, TZCNT, tzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
11354
11355#ifndef TST_IEM_CHECK_MC
11356 static const IEMOPBINSIZES s_iemAImpl_tzcnt =
11357 { NULL, NULL, iemAImpl_tzcnt_u16, NULL, iemAImpl_tzcnt_u32, NULL, iemAImpl_tzcnt_u64, NULL };
11358 static const IEMOPBINSIZES s_iemAImpl_tzcnt_amd =
11359 { NULL, NULL, iemAImpl_tzcnt_u16_amd, NULL, iemAImpl_tzcnt_u32_amd, NULL, iemAImpl_tzcnt_u64_amd, NULL };
11360 static const IEMOPBINSIZES s_iemAImpl_tzcnt_intel =
11361 { NULL, NULL, iemAImpl_tzcnt_u16_intel, NULL, iemAImpl_tzcnt_u32_intel, NULL, iemAImpl_tzcnt_u64_intel, NULL };
11362 static const IEMOPBINSIZES * const s_iemAImpl_tzcnt_eflags[2][4] =
11363 {
11364 { &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt_intel },
11365 { &s_iemAImpl_tzcnt, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt }
11366 };
11367#endif
11368 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
11369 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_tzcnt_eflags,
11370 IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1);
11371 IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1, IEM_MC_F_NOT_286_OR_OLDER);
11372}
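
/* Usage note (illustrative): without BMI1 the 0f3h prefix is simply
   ignored and the instruction executes as bsf (see the forwarding above).
   With BMI1, tzcnt is defined for a zero source: 'tzcnt eax, ebx' with
   EBX=0 gives EAX=32 and CF=1, while a result of zero (EBX bit 0 set)
   gives ZF=1. */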
11373
11374
11375/** Opcode 0x0f 0xbd. */
11376FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
11377{
11378 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
11379 IEMOP_HLP_MIN_386();
11380 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
11381 return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsr_eflags));
11382}
11383
11384
11385/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
11386FNIEMOP_DEF(iemOp_lzcnt_Gv_Ev)
11387{
11388 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAbm)
11389 return FNIEMOP_CALL(iemOp_bsr_Gv_Ev);
11390 IEMOP_MNEMONIC2(RM, LZCNT, lzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
11391
11392#ifndef TST_IEM_CHECK_MC
11393 static const IEMOPBINSIZES s_iemAImpl_lzcnt =
11394 { NULL, NULL, iemAImpl_lzcnt_u16, NULL, iemAImpl_lzcnt_u32, NULL, iemAImpl_lzcnt_u64, NULL };
11395 static const IEMOPBINSIZES s_iemAImpl_lzcnt_amd =
11396 { NULL, NULL, iemAImpl_lzcnt_u16_amd, NULL, iemAImpl_lzcnt_u32_amd, NULL, iemAImpl_lzcnt_u64_amd, NULL };
11397 static const IEMOPBINSIZES s_iemAImpl_lzcnt_intel =
11398 { NULL, NULL, iemAImpl_lzcnt_u16_intel, NULL, iemAImpl_lzcnt_u32_intel, NULL, iemAImpl_lzcnt_u64_intel, NULL };
11399 static const IEMOPBINSIZES * const s_iemAImpl_lzcnt_eflags[2][4] =
11400 {
11401 { &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt_intel },
11402 { &s_iemAImpl_lzcnt, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt }
11403 };
11404#endif
11405 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
11406 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_lzcnt_eflags,
11407 IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1);
11408 IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1, IEM_MC_F_NOT_286_OR_OLDER);
11409}
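
/* Same pattern for lzcnt (illustrative): without ABM the 0f3h prefix
   falls back to bsr above; with it, 'lzcnt eax, ebx' counts leading zero
   bits, e.g. EBX=1 -> EAX=31, and EBX=0 -> EAX=32 with CF=1. */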
11410
11411
11412
11413/** Opcode 0x0f 0xbe. */
11414FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
11415{
11416 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
11417 IEMOP_HLP_MIN_386();
11418
11419 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11420
11421 /*
11422 * If rm is denoting a register, no more instruction bytes.
11423 */
11424 if (IEM_IS_MODRM_REG_MODE(bRm))
11425 {
11426 switch (pVCpu->iem.s.enmEffOpSize)
11427 {
11428 case IEMMODE_16BIT:
11429 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
11430 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11431 IEM_MC_LOCAL(uint16_t, u16Value);
11432 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11433 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
11434 IEM_MC_ADVANCE_RIP_AND_FINISH();
11435 IEM_MC_END();
11436 break;
11437
11438 case IEMMODE_32BIT:
11439 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
11440 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11441 IEM_MC_LOCAL(uint32_t, u32Value);
11442 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11443 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11444 IEM_MC_ADVANCE_RIP_AND_FINISH();
11445 IEM_MC_END();
11446 break;
11447
11448 case IEMMODE_64BIT:
11449 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
11450 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11451 IEM_MC_LOCAL(uint64_t, u64Value);
11452 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11453 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11454 IEM_MC_ADVANCE_RIP_AND_FINISH();
11455 IEM_MC_END();
11456 break;
11457
11458 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11459 }
11460 }
11461 else
11462 {
11463 /*
11464 * We're loading a register from memory.
11465 */
11466 switch (pVCpu->iem.s.enmEffOpSize)
11467 {
11468 case IEMMODE_16BIT:
11469 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
11470 IEM_MC_LOCAL(uint16_t, u16Value);
11471 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11472 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11473 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11474 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11475 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
11476 IEM_MC_ADVANCE_RIP_AND_FINISH();
11477 IEM_MC_END();
11478 break;
11479
11480 case IEMMODE_32BIT:
11481 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
11482 IEM_MC_LOCAL(uint32_t, u32Value);
11483 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11484 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11485 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11486 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11487 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11488 IEM_MC_ADVANCE_RIP_AND_FINISH();
11489 IEM_MC_END();
11490 break;
11491
11492 case IEMMODE_64BIT:
11493 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
11494 IEM_MC_LOCAL(uint64_t, u64Value);
11495 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11496 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11497 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11498 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11499 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11500 IEM_MC_ADVANCE_RIP_AND_FINISH();
11501 IEM_MC_END();
11502 break;
11503
11504 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11505 }
11506 }
11507}
11508
11509
11510/** Opcode 0x0f 0xbf. */
11511FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
11512{
11513 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
11514 IEMOP_HLP_MIN_386();
11515
11516 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11517
11518 /** @todo Not entirely sure how the operand size prefix is handled here,
11519 * assuming that it will be ignored. Would be nice to have a few
11520 * tests for this. */
11521 /*
11522 * If rm is denoting a register, no more instruction bytes.
11523 */
11524 if (IEM_IS_MODRM_REG_MODE(bRm))
11525 {
11526 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11527 {
11528 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
11529 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11530 IEM_MC_LOCAL(uint32_t, u32Value);
11531 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11532 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11533 IEM_MC_ADVANCE_RIP_AND_FINISH();
11534 IEM_MC_END();
11535 }
11536 else
11537 {
11538 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
11539 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11540 IEM_MC_LOCAL(uint64_t, u64Value);
11541 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11542 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11543 IEM_MC_ADVANCE_RIP_AND_FINISH();
11544 IEM_MC_END();
11545 }
11546 }
11547 else
11548 {
11549 /*
11550 * We're loading a register from memory.
11551 */
11552 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11553 {
11554 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
11555 IEM_MC_LOCAL(uint32_t, u32Value);
11556 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11557 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11558 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11559 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11560 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11561 IEM_MC_ADVANCE_RIP_AND_FINISH();
11562 IEM_MC_END();
11563 }
11564 else
11565 {
11566 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
11567 IEM_MC_LOCAL(uint64_t, u64Value);
11568 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11569 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11570 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11571 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11572 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11573 IEM_MC_ADVANCE_RIP_AND_FINISH();
11574 IEM_MC_END();
11575 }
11576 }
11577}
11578
11579
11580/** Opcode 0x0f 0xc0. */
11581FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
11582{
11583 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11584 IEMOP_HLP_MIN_486();
11585 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
11586
11587 /*
11588 * If rm is denoting a register, no more instruction bytes.
11589 */
11590 if (IEM_IS_MODRM_REG_MODE(bRm))
11591 {
11592 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_486, 0);
11593 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11594 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
11595 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
11596 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11597
11598 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11599 IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11600 IEM_MC_REF_EFLAGS(pEFlags);
11601 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
11602
11603 IEM_MC_ADVANCE_RIP_AND_FINISH();
11604 IEM_MC_END();
11605 }
11606 else
11607 {
11608 /*
11609 * We're accessing memory.
11610 */
11611#define IEMOP_BODY_XADD_BYTE(a_fnWorker, a_Type) \
11612 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_486, 0); \
11613 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11614 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11615 IEMOP_HLP_DONE_DECODING(); \
11616 \
11617 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11618 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
11619 IEM_MC_MEM_MAP_U8_##a_Type(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11620 \
11621 IEM_MC_LOCAL(uint8_t, u8RegCopy); \
11622 IEM_MC_FETCH_GREG_U8(u8RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11623 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Reg, u8RegCopy, 1); \
11624 \
11625 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11626 IEM_MC_FETCH_EFLAGS(EFlags); \
11627 IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker, pu8Dst, pu8Reg, pEFlags); \
11628 \
11629 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
11630 IEM_MC_COMMIT_EFLAGS(EFlags); \
11631 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8RegCopy); \
11632 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11633 IEM_MC_END()
11634 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
11635 {
11636 IEMOP_BODY_XADD_BYTE(iemAImpl_xadd_u8,RW);
11637 }
11638 else
11639 {
11640 IEMOP_BODY_XADD_BYTE(iemAImpl_xadd_u8_locked,ATOMIC);
11641 }
11642 }
11643}
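
/* Reference sketch of the xadd semantics (standard x86, illustration
   only):
        u8Tmp = *pu8Dst; *pu8Dst = *pu8Dst + u8Reg; u8Reg = u8Tmp;
   with the flags set as for the add.  The memory body above therefore
   keeps the register value in a local copy (u8RegCopy) and stores it
   back to the guest register after the worker has run. */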
11644
11645
11646/** Opcode 0x0f 0xc1. */
11647FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
11648{
11649 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
11650 IEMOP_HLP_MIN_486();
11651 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11652
11653 /*
11654 * If rm is denoting a register, no more instruction bytes.
11655 */
11656 if (IEM_IS_MODRM_REG_MODE(bRm))
11657 {
11658 switch (pVCpu->iem.s.enmEffOpSize)
11659 {
11660 case IEMMODE_16BIT:
11661 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_486, 0);
11662 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11663 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11664 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
11665 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11666
11667 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11668 IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11669 IEM_MC_REF_EFLAGS(pEFlags);
11670 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
11671
11672 IEM_MC_ADVANCE_RIP_AND_FINISH();
11673 IEM_MC_END();
11674 break;
11675
11676 case IEMMODE_32BIT:
11677 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_486, 0);
11678 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11679 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11680 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
11681 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11682
11683 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11684 IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11685 IEM_MC_REF_EFLAGS(pEFlags);
11686 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
11687
11688 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
11689 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
11690 IEM_MC_ADVANCE_RIP_AND_FINISH();
11691 IEM_MC_END();
11692 break;
11693
11694 case IEMMODE_64BIT:
11695 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
11696 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11697 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11698 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
11699 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11700
11701 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11702 IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11703 IEM_MC_REF_EFLAGS(pEFlags);
11704 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
11705
11706 IEM_MC_ADVANCE_RIP_AND_FINISH();
11707 IEM_MC_END();
11708 break;
11709
11710 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11711 }
11712 }
11713 else
11714 {
11715 /*
11716 * We're accessing memory.
11717 */
11718#define IEMOP_BODY_XADD_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64, a_Type) \
11719 do { \
11720 switch (pVCpu->iem.s.enmEffOpSize) \
11721 { \
11722 case IEMMODE_16BIT: \
11723 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_486, 0); \
11724 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11725 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11726 IEMOP_HLP_DONE_DECODING(); \
11727 \
11728 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11729 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
11730 IEM_MC_MEM_MAP_U16_##a_Type(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11731 \
11732 IEM_MC_LOCAL(uint16_t, u16RegCopy); \
11733 IEM_MC_FETCH_GREG_U16(u16RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11734 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Reg, u16RegCopy, 1); \
11735 \
11736 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11737 IEM_MC_FETCH_EFLAGS(EFlags); \
11738 IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker16, pu16Dst, pu16Reg, pEFlags); \
11739 \
11740 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
11741 IEM_MC_COMMIT_EFLAGS(EFlags); \
11742 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16RegCopy); \
11743 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11744 IEM_MC_END(); \
11745 break; \
11746 \
11747 case IEMMODE_32BIT: \
11748 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_486, 0); \
11749 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11750 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11751 IEMOP_HLP_DONE_DECODING(); \
11752 \
11753 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11754 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
11755 IEM_MC_MEM_MAP_U32_##a_Type(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11756 \
11757 IEM_MC_LOCAL(uint32_t, u32RegCopy); \
11758 IEM_MC_FETCH_GREG_U32(u32RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11759 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Reg, u32RegCopy, 1); \
11760 \
11761 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11762 IEM_MC_FETCH_EFLAGS(EFlags); \
11763 IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker32, pu32Dst, pu32Reg, pEFlags); \
11764 \
11765 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
11766 IEM_MC_COMMIT_EFLAGS(EFlags); \
11767 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32RegCopy); \
11768 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11769 IEM_MC_END(); \
11770 break; \
11771 \
11772 case IEMMODE_64BIT: \
11773 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0); \
11774 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11775 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11776 IEMOP_HLP_DONE_DECODING(); \
11777 \
11778 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11779 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
11780 IEM_MC_MEM_MAP_U64_##a_Type(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11781 \
11782 IEM_MC_LOCAL(uint64_t, u64RegCopy); \
11783 IEM_MC_FETCH_GREG_U64(u64RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm)); \
11784 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Reg, u64RegCopy, 1); \
11785 \
11786 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11787 IEM_MC_FETCH_EFLAGS(EFlags); \
11788 IEM_MC_CALL_VOID_AIMPL_3(a_fnWorker64, pu64Dst, pu64Reg, pEFlags); \
11789 \
11790 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
11791 IEM_MC_COMMIT_EFLAGS(EFlags); \
11792 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64RegCopy); \
11793 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11794 IEM_MC_END(); \
11795 break; \
11796 \
11797 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11798 } \
11799 } while (0)
11800
11801 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
11802 {
11803 IEMOP_BODY_XADD_EV_GV(iemAImpl_xadd_u16, iemAImpl_xadd_u32, iemAImpl_xadd_u64,RW);
11804 }
11805 else
11806 {
11807 IEMOP_BODY_XADD_EV_GV(iemAImpl_xadd_u16_locked, iemAImpl_xadd_u32_locked, iemAImpl_xadd_u64_locked,ATOMIC);
11808 }
11809 }
11810}
11811
11812
11813/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
11814FNIEMOP_DEF(iemOp_cmpps_Vps_Wps_Ib)
11815{
11816 IEMOP_MNEMONIC3(RMI, CMPPS, cmpps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11817
11818 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11819 if (IEM_IS_MODRM_REG_MODE(bRm))
11820 {
11821 /*
11822 * XMM, XMM.
11823 */
11824 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
11825 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11826 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11827 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11828 IEM_MC_LOCAL(X86XMMREG, Dst);
11829 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11830 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11831 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11832 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11833 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11834 IEM_MC_PREPARE_SSE_USAGE();
11835 IEM_MC_REF_MXCSR(pfMxcsr);
11836 IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
11837 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpps_u128, pfMxcsr, pDst, pSrc, bImmArg);
11838 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11839 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11840 } IEM_MC_ELSE() {
11841 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11842 } IEM_MC_ENDIF();
11843
11844 IEM_MC_ADVANCE_RIP_AND_FINISH();
11845 IEM_MC_END();
11846 }
11847 else
11848 {
11849 /*
11850 * XMM, [mem128].
11851 */
11852 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
11853 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11854 IEM_MC_LOCAL(X86XMMREG, Dst);
11855 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11856 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11857 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11858 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11859
11860 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11861 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11862 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11863 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11864 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11865 IEM_MC_PREPARE_SSE_USAGE();
11866
11867 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11868 IEM_MC_REF_MXCSR(pfMxcsr);
11869 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpps_u128, pfMxcsr, pDst, pSrc, bImmArg);
11870 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11871 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11872 } IEM_MC_ELSE() {
11873 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11874 } IEM_MC_ENDIF();
11875
11876 IEM_MC_ADVANCE_RIP_AND_FINISH();
11877 IEM_MC_END();
11878 }
11879}
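/* Illustrative sketch (not built, name hypothetical) of the 3-bit imm8
   predicate that cmpps/cmpss evaluate per lane, assuming the standard SSE
   encodings 0..7; the real worker writes an all-ones/all-zeroes lane mask
   and reports NaN exceptions via MXCSR rather than returning a bool. */
#if 0
#include <math.h>
static bool cmppsLaneExample(float fDst, float fSrc, uint8_t bImm)
{
    switch (bImm & 7)
    {
        case 0:  return fDst == fSrc;                   /* EQ  (ordered)   */
        case 1:  return fDst <  fSrc;                   /* LT  (ordered)   */
        case 2:  return fDst <= fSrc;                   /* LE  (ordered)   */
        case 3:  return isnan(fDst) || isnan(fSrc);     /* UNORD           */
        case 4:  return !(fDst == fSrc);                /* NEQ (unordered) */
        case 5:  return !(fDst <  fSrc);                /* NLT (unordered) */
        case 6:  return !(fDst <= fSrc);                /* NLE (unordered) */
        default: return !isnan(fDst) && !isnan(fSrc);   /* ORD             */
    }
}
#endif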
11880
11881
11882/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
11883FNIEMOP_DEF(iemOp_cmppd_Vpd_Wpd_Ib)
11884{
11885 IEMOP_MNEMONIC3(RMI, CMPPD, cmppd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11886
11887 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11888 if (IEM_IS_MODRM_REG_MODE(bRm))
11889 {
11890 /*
11891 * XMM, XMM.
11892 */
11893 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
11894 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11895 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11896 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11897 IEM_MC_LOCAL(X86XMMREG, Dst);
11898 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11899 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11900 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11901 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11902 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11903 IEM_MC_PREPARE_SSE_USAGE();
11904 IEM_MC_REF_MXCSR(pfMxcsr);
11905 IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
11906 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmppd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11907 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11908 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11909 } IEM_MC_ELSE() {
11910 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11911 } IEM_MC_ENDIF();
11912
11913 IEM_MC_ADVANCE_RIP_AND_FINISH();
11914 IEM_MC_END();
11915 }
11916 else
11917 {
11918 /*
11919 * XMM, [mem128].
11920 */
11921 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
11922 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11923 IEM_MC_LOCAL(X86XMMREG, Dst);
11924 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11925 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11926 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11927 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11928
11929 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11930 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11931 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11932 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11933 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11934 IEM_MC_PREPARE_SSE_USAGE();
11935
11936 IEM_MC_REF_MXCSR(pfMxcsr);
11937 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11938 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmppd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11939 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11940 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11941 } IEM_MC_ELSE() {
11942 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11943 } IEM_MC_ENDIF();
11944
11945 IEM_MC_ADVANCE_RIP_AND_FINISH();
11946 IEM_MC_END();
11947 }
11948}
11949
11950
11951/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
11952FNIEMOP_DEF(iemOp_cmpss_Vss_Wss_Ib)
11953{
11954 IEMOP_MNEMONIC3(RMI, CMPSS, cmpss, Vss, Wss, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11955
11956 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11957 if (IEM_IS_MODRM_REG_MODE(bRm))
11958 {
11959 /*
11960 * XMM32, XMM32.
11961 */
11962 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
11963 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11964 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse); /* CMPSS is plain SSE, not SSE2. */
11965 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11966 IEM_MC_LOCAL(X86XMMREG, Dst);
11967 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11968 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11969 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11970 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11971 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11972 IEM_MC_PREPARE_SSE_USAGE();
11973 IEM_MC_REF_MXCSR(pfMxcsr);
11974 IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
11975 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpss_u128, pfMxcsr, pDst, pSrc, bImmArg);
11976 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11977 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11978 } IEM_MC_ELSE() {
11979 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11980 } IEM_MC_ENDIF();
11981
11982 IEM_MC_ADVANCE_RIP_AND_FINISH();
11983 IEM_MC_END();
11984 }
11985 else
11986 {
11987 /*
11988 * XMM32, [mem32].
11989 */
11990 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
11991 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11992 IEM_MC_LOCAL(X86XMMREG, Dst);
11993 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11994 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11995 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11996 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11997
11998 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11999 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12000 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
12001 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse); /* CMPSS is plain SSE, not SSE2. */
12002 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12003 IEM_MC_PREPARE_SSE_USAGE();
12004
12005 IEM_MC_FETCH_MEM_XMM_U32_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
12006 0 /*a_iDword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12007 IEM_MC_REF_MXCSR(pfMxcsr);
12008 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpss_u128, pfMxcsr, pDst, pSrc, bImmArg);
12009 IEM_MC_IF_MXCSR_XCPT_PENDING() {
12010 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
12011 } IEM_MC_ELSE() {
12012 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
12013 } IEM_MC_ENDIF();
12014
12015 IEM_MC_ADVANCE_RIP_AND_FINISH();
12016 IEM_MC_END();
12017 }
12018}
12019
12020
12021/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
12022FNIEMOP_DEF(iemOp_cmpsd_Vsd_Wsd_Ib)
12023{
12024 IEMOP_MNEMONIC3(RMI, CMPSD, cmpsd, Vsd, Wsd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12025
12026 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12027 if (IEM_IS_MODRM_REG_MODE(bRm))
12028 {
12029 /*
12030 * XMM64, XMM64.
12031 */
12032 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
12033 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12034 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12035 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
12036 IEM_MC_LOCAL(X86XMMREG, Dst);
12037 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
12038 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
12039 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
12040 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
12041 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12042 IEM_MC_PREPARE_SSE_USAGE();
12043 IEM_MC_REF_MXCSR(pfMxcsr);
12044 IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
12045 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
12046 IEM_MC_IF_MXCSR_XCPT_PENDING() {
12047 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
12048 } IEM_MC_ELSE() {
12049 IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
12050 } IEM_MC_ENDIF();
12051
12052 IEM_MC_ADVANCE_RIP_AND_FINISH();
12053 IEM_MC_END();
12054 }
12055 else
12056 {
12057 /*
12058 * XMM64, [mem64].
12059 */
12060 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
12061 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
12062 IEM_MC_LOCAL(X86XMMREG, Dst);
12063 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
12064 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
12065 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
12066 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12067
12068 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12069 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12070 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
12071 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12072 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12073 IEM_MC_PREPARE_SSE_USAGE();
12074
12075 IEM_MC_REF_MXCSR(pfMxcsr);
12076 IEM_MC_FETCH_MEM_XMM_U64_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
12077 0 /*a_iQword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12078 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
12079 IEM_MC_IF_MXCSR_XCPT_PENDING() {
12080 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
12081 } IEM_MC_ELSE() {
12082 IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
12083 } IEM_MC_ENDIF();
12084
12085 IEM_MC_ADVANCE_RIP_AND_FINISH();
12086 IEM_MC_END();
12087 }
12088}
12089
12090
12091/** Opcode 0x0f 0xc3. */
12092FNIEMOP_DEF(iemOp_movnti_My_Gy)
12093{
12094 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
12095
12096 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12097
12098 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
12099 if (IEM_IS_MODRM_MEM_MODE(bRm))
12100 {
12101 switch (pVCpu->iem.s.enmEffOpSize)
12102 {
12103 case IEMMODE_32BIT:
12104 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
12105 IEM_MC_LOCAL(uint32_t, u32Value);
12106 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12107
12108 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12109 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12110
12111 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
12112 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
12113 IEM_MC_ADVANCE_RIP_AND_FINISH();
12114 IEM_MC_END();
12115 break;
12116
12117 case IEMMODE_64BIT:
12118 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
12119 IEM_MC_LOCAL(uint64_t, u64Value);
12120 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12121
12122 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12123 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12124
12125 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
12126 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
12127 IEM_MC_ADVANCE_RIP_AND_FINISH();
12128 IEM_MC_END();
12129 break;
12130
12131 case IEMMODE_16BIT:
12132 /** @todo check this form. */
12133 IEMOP_RAISE_INVALID_OPCODE_RET();
12134
12135 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12136 }
12137 }
12138 else
12139 IEMOP_RAISE_INVALID_OPCODE_RET();
12140}
12141
12142
12143/* Opcode 0x66 0x0f 0xc3 - invalid */
12144/* Opcode 0xf3 0x0f 0xc3 - invalid */
12145/* Opcode 0xf2 0x0f 0xc3 - invalid */
12146
12147
12148/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
12149FNIEMOP_DEF(iemOp_pinsrw_Pq_RyMw_Ib)
12150{
12151 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Pq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
12152 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12153 if (IEM_IS_MODRM_REG_MODE(bRm))
12154 {
12155 /*
12156 * Register, register.
12157 */
12158 IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12159 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12160 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
12161 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12162 IEM_MC_ARG(uint16_t, u16Src, 1);
12163 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12164 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12165 IEM_MC_PREPARE_FPU_USAGE();
12166 IEM_MC_FPU_TO_MMX_MODE();
12167 IEM_MC_REF_MREG_U64(pu64Dst, IEM_GET_MODRM_REG_8(bRm));
12168 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
12169 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u64, pu64Dst, u16Src, bImmArg);
12170 IEM_MC_MODIFIED_MREG_BY_REF(pu64Dst);
12171 IEM_MC_ADVANCE_RIP_AND_FINISH();
12172 IEM_MC_END();
12173 }
12174 else
12175 {
12176 /*
12177 * Register, memory.
12178 */
12179 IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
12180 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12181 IEM_MC_ARG(uint16_t, u16Src, 1);
12182 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12183
12184 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12185 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12186 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12187 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
12188 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12189 IEM_MC_PREPARE_FPU_USAGE();
12190 IEM_MC_FPU_TO_MMX_MODE();
12191
12192 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12193 IEM_MC_REF_MREG_U64(pu64Dst, IEM_GET_MODRM_REG_8(bRm));
12194 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u64, pu64Dst, u16Src, bImmArg);
12195 IEM_MC_MODIFIED_MREG_BY_REF(pu64Dst);
12196 IEM_MC_ADVANCE_RIP_AND_FINISH();
12197 IEM_MC_END();
12198 }
12199}
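/* Illustrative sketch (not built, name hypothetical) of what the pinsrw
   worker does: replace word (imm8 & 3) of the 64-bit destination; the XMM
   form below does the same with imm8 & 7 on a 128-bit register. */
#if 0
static void pinsrwU64Example(uint64_t *puDst, uint16_t u16Src, uint8_t bImm)
{
    unsigned const iWord = bImm & 3;
    *puDst &= ~(UINT64_C(0xffff) << (iWord * 16));  /* clear the target word */
    *puDst |= (uint64_t)u16Src << (iWord * 16);     /* insert the new word   */
}
#endif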
12200
12201
12202/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
12203FNIEMOP_DEF(iemOp_pinsrw_Vdq_RyMw_Ib)
12204{
12205 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Vq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12206 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12207 if (IEM_IS_MODRM_REG_MODE(bRm))
12208 {
12209 /*
12210 * Register, register.
12211 */
12212 IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12213 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12214 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12215 IEM_MC_ARG(PRTUINT128U, puDst, 0);
12216 IEM_MC_ARG(uint16_t, u16Src, 1);
12217 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12218 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12219 IEM_MC_PREPARE_SSE_USAGE();
12220 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
12221 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12222 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u128, puDst, u16Src, bImmArg);
12223 IEM_MC_ADVANCE_RIP_AND_FINISH();
12224 IEM_MC_END();
12225 }
12226 else
12227 {
12228 /*
12229 * Register, memory.
12230 */
12231 IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
12232 IEM_MC_ARG(PRTUINT128U, puDst, 0);
12233 IEM_MC_ARG(uint16_t, u16Src, 1);
12234 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12235
12236 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12237 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12238 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12239 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12240 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12241 IEM_MC_PREPARE_SSE_USAGE();
12242
12243 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12244 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12245 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u128, puDst, u16Src, bImmArg);
12246 IEM_MC_ADVANCE_RIP_AND_FINISH();
12247 IEM_MC_END();
12248 }
12249}
12250
12251
12252/* Opcode 0xf3 0x0f 0xc4 - invalid */
12253/* Opcode 0xf2 0x0f 0xc4 - invalid */
12254
12255
12256/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
12257FNIEMOP_DEF(iemOp_pextrw_Gd_Nq_Ib)
12258{
12259 /*IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);*/ /** @todo */
12260 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12261 if (IEM_IS_MODRM_REG_MODE(bRm))
12262 {
12263 /*
12264 * Greg32, MMX, imm8.
12265 */
12266 IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
12267 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12268 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
12269 IEM_MC_LOCAL(uint16_t, u16Dst);
12270 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
12271 IEM_MC_ARG(uint64_t, u64Src, 1);
12272 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12273 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12274 IEM_MC_PREPARE_FPU_USAGE();
12275 IEM_MC_FPU_TO_MMX_MODE();
12276 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
12277 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pextrw_u64, pu16Dst, u64Src, bImmArg);
12278 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
12279 IEM_MC_ADVANCE_RIP_AND_FINISH();
12280 IEM_MC_END();
12281 }
12282 /* No memory operand. */
12283 else
12284 IEMOP_RAISE_INVALID_OPCODE_RET();
12285}
12286
12287
12288/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
12289FNIEMOP_DEF(iemOp_pextrw_Gd_Udq_Ib)
12290{
12291 IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12292 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12293 if (IEM_IS_MODRM_REG_MODE(bRm))
12294 {
12295 /*
12296 * Greg32, XMM, imm8.
12297 */
12298 IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
12299 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12300 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12301 IEM_MC_LOCAL(uint16_t, u16Dst);
12302 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
12303 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
12304 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12305 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12306 IEM_MC_PREPARE_SSE_USAGE();
12307 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12308 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pextrw_u128, pu16Dst, puSrc, bImmArg);
12309 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
12310 IEM_MC_ADVANCE_RIP_AND_FINISH();
12311 IEM_MC_END();
12312 }
12313 /* No memory operand. */
12314 else
12315 IEMOP_RAISE_INVALID_OPCODE_RET();
12316}
12317
12318
12319/* Opcode 0xf3 0x0f 0xc5 - invalid */
12320/* Opcode 0xf2 0x0f 0xc5 - invalid */
12321
12322
12323/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
12324FNIEMOP_DEF(iemOp_shufps_Vps_Wps_Ib)
12325{
12326 IEMOP_MNEMONIC3(RMI, SHUFPS, shufps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12327 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12328 if (IEM_IS_MODRM_REG_MODE(bRm))
12329 {
12330 /*
12331 * XMM, XMM, imm8.
12332 */
12333 IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12334 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12335 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
12336 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12337 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
12338 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12339 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12340 IEM_MC_PREPARE_SSE_USAGE();
12341 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12342 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12343 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);
12344 IEM_MC_ADVANCE_RIP_AND_FINISH();
12345 IEM_MC_END();
12346 }
12347 else
12348 {
12349 /*
12350 * XMM, [mem128], imm8.
12351 */
12352 IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
12353 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12354 IEM_MC_LOCAL(RTUINT128U, uSrc);
12355 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
12356 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12357
12358 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12359 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12360 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12361 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
12362 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12363 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12364
12365 IEM_MC_PREPARE_SSE_USAGE();
12366 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12367 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);
12368
12369 IEM_MC_ADVANCE_RIP_AND_FINISH();
12370 IEM_MC_END();
12371 }
12372}
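/* Illustrative sketch (not built, name hypothetical) of the shufps selection:
   the two low result dwords are picked from the destination and the two high
   ones from the source, each by a 2-bit field of imm8. shufpd works the same
   way on two qwords using imm8 bits 0 and 1. */
#if 0
#include <string.h>
static void shufpsExample(float afDst[4], float const afSrc[4], uint8_t bImm)
{
    float const afResult[4] =
    {
        afDst[ bImm       & 3],
        afDst[(bImm >> 2) & 3],
        afSrc[(bImm >> 4) & 3],
        afSrc[(bImm >> 6) & 3],
    };
    memcpy(afDst, afResult, sizeof(afResult));
}
#endif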
12373
12374
12375/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
12376FNIEMOP_DEF(iemOp_shufpd_Vpd_Wpd_Ib)
12377{
12378 IEMOP_MNEMONIC3(RMI, SHUFPD, shufpd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12379 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12380 if (IEM_IS_MODRM_REG_MODE(bRm))
12381 {
12382 /*
12383 * XMM, XMM, imm8.
12384 */
12385 IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12386 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12387 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12388 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12389 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
12390 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12391 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12392 IEM_MC_PREPARE_SSE_USAGE();
12393 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12394 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12395 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);
12396 IEM_MC_ADVANCE_RIP_AND_FINISH();
12397 IEM_MC_END();
12398 }
12399 else
12400 {
12401 /*
12402 * XMM, [mem128], imm8.
12403 */
12404 IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
12405 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12406 IEM_MC_LOCAL(RTUINT128U, uSrc);
12407 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
12408 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12409
12410 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12411 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12412 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12413 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12414 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12415 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12416
12417 IEM_MC_PREPARE_SSE_USAGE();
12418 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12419 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);
12420
12421 IEM_MC_ADVANCE_RIP_AND_FINISH();
12422 IEM_MC_END();
12423 }
12424}
12425
12426
12427/* Opcode 0xf3 0x0f 0xc6 - invalid */
12428/* Opcode 0xf2 0x0f 0xc6 - invalid */
12429
12430
12431/** Opcode 0x0f 0xc7 !11/1. */
12432FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
12433{
12434 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
12435#define IEMOP_BODY_CMPXCHG8B(a_fnWorker, a_Type) \
12436 IEM_MC_BEGIN(4, 5, IEM_MC_F_NOT_286_OR_OLDER, 0); \
12437 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12438 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
12439 IEMOP_HLP_DONE_DECODING_EX(fCmpXchg8b); \
12440 \
12441 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
12442 IEM_MC_ARG(uint64_t *, pu64MemDst, 0); \
12443 IEM_MC_MEM_MAP_U64_##a_Type(pu64MemDst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
12444 \
12445 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx); \
12446 IEM_MC_FETCH_GREG_PAIR_U32(u64EaxEdx, X86_GREG_xAX, X86_GREG_xDX); \
12447 IEM_MC_ARG_LOCAL_REF(PRTUINT64U, pu64EaxEdx, u64EaxEdx, 1); \
12448 \
12449 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx); \
12450 IEM_MC_FETCH_GREG_PAIR_U32(u64EbxEcx, X86_GREG_xBX, X86_GREG_xCX); \
12451 IEM_MC_ARG_LOCAL_REF(PRTUINT64U, pu64EbxEcx, u64EbxEcx, 2); \
12452 \
12453 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
12454 IEM_MC_FETCH_EFLAGS(EFlags); \
12455 IEM_MC_CALL_VOID_AIMPL_4(a_fnWorker, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags); \
12456 \
12457 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
12458 IEM_MC_COMMIT_EFLAGS(EFlags); \
12459 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
12460 IEM_MC_STORE_GREG_PAIR_U32(X86_GREG_xAX, X86_GREG_xDX, u64EaxEdx); \
12461 } IEM_MC_ENDIF(); \
12462 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12463 \
12464 IEM_MC_END()
12465 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
12466 {
12467 IEMOP_BODY_CMPXCHG8B(iemAImpl_cmpxchg8b,RW);
12468 }
12469 else
12470 {
12471 IEMOP_BODY_CMPXCHG8B(iemAImpl_cmpxchg8b_locked,ATOMIC);
12472 }
12473}
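/* Illustrative sketch (not built, names hypothetical) of the comparison the
   cmpxchg8b workers perform; the locked variant does this atomically. */
#if 0
static void cmpxchg8bExample(uint64_t *pu64Dst, uint64_t *pu64EaxEdx /* in/out */,
                             uint64_t u64EbxEcx, uint32_t *pfEFlags)
{
    if (*pu64Dst == *pu64EaxEdx)
    {
        *pu64Dst   = u64EbxEcx;       /* equal: store ECX:EBX ...    */
        *pfEFlags |= X86_EFL_ZF;      /* ... and set ZF              */
    }
    else
    {
        *pu64EaxEdx = *pu64Dst;       /* not equal: load EDX:EAX ... */
        *pfEFlags  &= ~X86_EFL_ZF;    /* ... and clear ZF            */
    }
}
#endif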
12474
12475
12476/** Opcode REX.W 0x0f 0xc7 !11/1. */
12477FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
12478{
12479 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
12480 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fCmpXchg16b)
12481 {
12482 /*
12483 * This is hairy, very hairy macro fun. We're walking a fine line
12484 * here to make the code parsable by IEMAllInstPython.py and fit into
12485 * the patterns IEMAllThrdPython.py requires for the code morphing.
12486 */
12487#define BODY_CMPXCHG16B_HEAD(bUnmapInfoStmt, a_Type) \
12488 IEM_MC_BEGIN(5, 4, IEM_MC_F_64BIT, 0); \
12489 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12490 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
12491 IEMOP_HLP_DONE_DECODING(); \
12492 \
12493 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16); \
12494 bUnmapInfoStmt; \
12495 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0); \
12496 IEM_MC_MEM_MAP_U128_##a_Type(pu128MemDst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
12497 \
12498 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx); \
12499 IEM_MC_FETCH_GREG_PAIR_U64(u128RaxRdx, X86_GREG_xAX, X86_GREG_xDX); \
12500 IEM_MC_ARG_LOCAL_REF(PRTUINT128U, pu128RaxRdx, u128RaxRdx, 1); \
12501 \
12502 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx); \
12503 IEM_MC_FETCH_GREG_PAIR_U64(u128RbxRcx, X86_GREG_xBX, X86_GREG_xCX); \
12504 IEM_MC_ARG_LOCAL_REF(PRTUINT128U, pu128RbxRcx, u128RbxRcx, 2); \
12505 \
12506 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); \
12507 IEM_MC_FETCH_EFLAGS(EFlags)
12508
12509#define BODY_CMPXCHG16B_TAIL(a_Type) \
12510 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
12511 IEM_MC_COMMIT_EFLAGS(EFlags); \
12512 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
12513 IEM_MC_STORE_GREG_PAIR_U64(X86_GREG_xAX, X86_GREG_xDX, u128RaxRdx); \
12514 } IEM_MC_ENDIF(); \
12515 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12516 IEM_MC_END()
12517
12518#ifdef RT_ARCH_AMD64
12519 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fCmpXchg16b)
12520 {
12521 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
12522 {
12523 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
12524 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12525 BODY_CMPXCHG16B_TAIL(RW);
12526 }
12527 else
12528 {
12529 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),ATOMIC);
12530 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12531 BODY_CMPXCHG16B_TAIL(ATOMIC);
12532 }
12533 }
12534 else
12535 { /* (see comments in #else case below) */
12536 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
12537 {
12538 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
12539 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12540 BODY_CMPXCHG16B_TAIL(RW);
12541 }
12542 else
12543 {
12544 BODY_CMPXCHG16B_HEAD(IEM_MC_ARG(uint8_t, bUnmapInfo, 4),RW);
12545 IEM_MC_CALL_CIMPL_5(IEM_CIMPL_F_STATUS_FLAGS,
12546 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
12547 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
12548 iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx,
12549 pEFlags, bUnmapInfo);
12550 IEM_MC_END();
12551 }
12552 }
12553
12554#elif defined(RT_ARCH_ARM64)
12555 /** @todo may require fallback for unaligned accesses... */
12556 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
12557 {
12558 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
12559 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12560 BODY_CMPXCHG16B_TAIL(RW);
12561 }
12562 else
12563 {
12564 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),ATOMIC);
12565 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12566 BODY_CMPXCHG16B_TAIL(ATOMIC);
12567 }
12568
12569#else
12570 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
12571 accesses and not at all atomic, which works fine in a uni-CPU guest
12572 configuration (ignoring DMA). If guest SMP is active we have no choice
12573 but to use a rendezvous callback here. Sigh. */
12574 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
12575 {
12576 BODY_CMPXCHG16B_HEAD(IEM_MC_LOCAL(uint8_t, bUnmapInfo),RW);
12577 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12578 BODY_CMPXCHG16B_TAIL(RW);
12579 }
12580 else
12581 {
12582 BODY_CMPXCHG16B_HEAD(IEM_MC_ARG(uint8_t, bUnmapInfo, 4),RW);
12583 IEM_MC_CALL_CIMPL_4(IEM_CIMPL_F_STATUS_FLAGS,
12584 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
12585 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX),
12586 iemCImpl_cmpxchg16b_fallback_rendezvous,
12587 pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12588 IEM_MC_END();
12589 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
12590 }
12591#endif
12592
12593#undef BODY_CMPXCHG16B_HEAD
#undef BODY_CMPXCHG16B_TAIL
12594 }
12595 Log(("cmpxchg16b -> #UD\n"));
12596 IEMOP_RAISE_INVALID_OPCODE_RET();
12597}
12598
12599FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
12600{
12601 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
12602 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
12603 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
12604}
12605
12606
12607/** Opcode 0x0f 0xc7 11/6. */
12608FNIEMOP_DEF_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm)
12609{
12610 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdRand)
12611 IEMOP_RAISE_INVALID_OPCODE_RET();
12612
12613 if (IEM_IS_MODRM_REG_MODE(bRm))
12614 {
12615 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12616 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12617 IEM_MC_ARG_CONST(uint8_t, iReg, /*=*/ IEM_GET_MODRM_RM(pVCpu, bRm), 0);
12618 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/ pVCpu->iem.s.enmEffOpSize, 1);
12619 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
12620 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
12621 iemCImpl_rdrand, iReg, enmEffOpSize);
12622 IEM_MC_END();
12623 }
12624 /* Register only. */
12625 else
12626 IEMOP_RAISE_INVALID_OPCODE_RET();
12627}
12628
12629/** Opcode 0x0f 0xc7 !11/6. */
12630#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12631FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
12632{
12633 IEMOP_MNEMONIC(vmptrld, "vmptrld");
12634 IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVDiag_Vmptrld);
12635 IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVDiag_Vmptrld);
12636 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12637 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
12638 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12639 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
12640 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12641 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
12642 IEM_MC_END();
12643}
12644#else
12645FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
12646#endif
12647
12648/** Opcode 0x66 0x0f 0xc7 !11/6. */
12649#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12650FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
12651{
12652 IEMOP_MNEMONIC(vmclear, "vmclear");
12653 IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVDiag_Vmclear);
12654 IEMOP_HLP_VMX_INSTR("vmclear", kVmxVDiag_Vmclear);
12655 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12656 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
12657 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12658 IEMOP_HLP_DONE_DECODING();
12659 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12660 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
12661 IEM_MC_END();
12662}
12663#else
12664FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
12665#endif
12666
12667/** Opcode 0xf3 0x0f 0xc7 !11/6. */
12668#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12669FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
12670{
12671 IEMOP_MNEMONIC(vmxon, "vmxon");
12672 IEMOP_HLP_VMX_INSTR("vmxon", kVmxVDiag_Vmxon);
12673 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12674 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
12675 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12676 IEMOP_HLP_DONE_DECODING();
12677 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12678 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
12679 IEM_MC_END();
12680}
12681#else
12682FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
12683#endif
12684
12685/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
12686#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12687FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
12688{
12689 IEMOP_MNEMONIC(vmptrst, "vmptrst");
12690 IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVDiag_Vmptrst);
12691 IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVDiag_Vmptrst);
12692 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12693 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
12694 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12695 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
12696 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12697 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, 0, iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
12698 IEM_MC_END();
12699}
12700#else
12701FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
12702#endif
12703
12704/** Opcode 0x0f 0xc7 11/7. */
12705FNIEMOP_DEF_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm)
12706{
12707 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdSeed)
12708 IEMOP_RAISE_INVALID_OPCODE_RET();
12709
12710 if (IEM_IS_MODRM_REG_MODE(bRm))
12711 {
12712 /* register destination. */
12713 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12714 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12715 IEM_MC_ARG_CONST(uint8_t, iReg, /*=*/ IEM_GET_MODRM_RM(pVCpu, bRm), 0);
12716 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/ pVCpu->iem.s.enmEffOpSize, 1);
12717 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
12718 RT_BIT_64(kIemNativeGstReg_GprFirst + IEM_GET_MODRM_RM(pVCpu, bRm)),
12719 iemCImpl_rdseed, iReg, enmEffOpSize);
12720 IEM_MC_END();
12721 }
12722 /* Register only. */
12723 else
12724 IEMOP_RAISE_INVALID_OPCODE_RET();
12725}
12726
12727/**
12728 * Group 9 jump table for register variant.
12729 */
12730IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
12731{ /* pfx: none, 066h, 0f3h, 0f2h */
12732 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12733 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
12734 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12735 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12736 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12737 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12738 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12739 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12740};
12741AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
12742
12743
12744/**
12745 * Group 9 jump table for memory variant.
12746 */
12747IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
12748{ /* pfx: none, 066h, 0f3h, 0f2h */
12749 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12750 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
12751 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12752 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12753 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12754 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12755 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
12756 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12757};
12758AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
12759
12760
12761/** Opcode 0x0f 0xc7. */
12762FNIEMOP_DEF(iemOp_Grp9)
12763{
12764 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12765 if (IEM_IS_MODRM_REG_MODE(bRm))
12766 /* register, register */
12767 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12768 + pVCpu->iem.s.idxPrefix], bRm);
12769 /* memory, register */
12770 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12771 + pVCpu->iem.s.idxPrefix], bRm);
12772}
12773
12774
12775/**
12776 * Common 'bswap register' helper.
12777 */
12778FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
12779{
12780 switch (pVCpu->iem.s.enmEffOpSize)
12781 {
12782 case IEMMODE_16BIT:
12783 IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_486, 0);
12784 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12785 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12786 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
12787 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
12788 IEM_MC_ADVANCE_RIP_AND_FINISH();
12789 IEM_MC_END();
12790 break;
12791
12792 case IEMMODE_32BIT:
12793 IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_486, 0);
12794 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12795 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12796 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
12797 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
12798 IEM_MC_CLEAR_HIGH_GREG_U64(iReg);
12799 IEM_MC_ADVANCE_RIP_AND_FINISH();
12800 IEM_MC_END();
12801 break;
12802
12803 case IEMMODE_64BIT:
12804 IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT, 0);
12805 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12806 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12807 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
12808 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
12809 IEM_MC_ADVANCE_RIP_AND_FINISH();
12810 IEM_MC_END();
12811 break;
12812
12813 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12814 }
12815}
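/* Illustrative sketch (not built, name hypothetical) of the 32-bit byte swap
   performed above, e.g. 0x11223344 -> 0x44332211. */
#if 0
static uint32_t bswapU32Example(uint32_t u32)
{
    return (u32 << 24)
         | ((u32 & UINT32_C(0x0000ff00)) << 8)
         | ((u32 >> 8) & UINT32_C(0x0000ff00))
         | (u32 >> 24);
}
#endif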
12816
12817
12818/** Opcode 0x0f 0xc8. */
12819FNIEMOP_DEF(iemOp_bswap_rAX_r8)
12820{
12821 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
12822 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
12823 prefix. It appears REX.B is actually the correct prefix. For a parallel
12824 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
12825 IEMOP_HLP_MIN_486();
12826 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
12827}
12828
12829
12830/** Opcode 0x0f 0xc9. */
12831FNIEMOP_DEF(iemOp_bswap_rCX_r9)
12832{
12833 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
12834 IEMOP_HLP_MIN_486();
12835 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
12836}
12837
12838
12839/** Opcode 0x0f 0xca. */
12840FNIEMOP_DEF(iemOp_bswap_rDX_r10)
12841{
12842 IEMOP_MNEMONIC(bswap_rDX_r9, "bswap rDX/r10");
12843 IEMOP_HLP_MIN_486();
12844 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
12845}
12846
12847
12848/** Opcode 0x0f 0xcb. */
12849FNIEMOP_DEF(iemOp_bswap_rBX_r11)
12850{
12851 IEMOP_MNEMONIC(bswap_rBX_r9, "bswap rBX/r11");
12852 IEMOP_HLP_MIN_486();
12853 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
12854}
12855
12856
12857/** Opcode 0x0f 0xcc. */
12858FNIEMOP_DEF(iemOp_bswap_rSP_r12)
12859{
12860 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
12861 IEMOP_HLP_MIN_486();
12862 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
12863}
12864
12865
12866/** Opcode 0x0f 0xcd. */
12867FNIEMOP_DEF(iemOp_bswap_rBP_r13)
12868{
12869 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
12870 IEMOP_HLP_MIN_486();
12871 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
12872}
12873
12874
12875/** Opcode 0x0f 0xce. */
12876FNIEMOP_DEF(iemOp_bswap_rSI_r14)
12877{
12878 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
12879 IEMOP_HLP_MIN_486();
12880 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
12881}
12882
12883
12884/** Opcode 0x0f 0xcf. */
12885FNIEMOP_DEF(iemOp_bswap_rDI_r15)
12886{
12887 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
12888 IEMOP_HLP_MIN_486();
12889 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
12890}
12891
12892
12893/* Opcode 0x0f 0xd0 - invalid */
12894
12895
12896/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
12897FNIEMOP_DEF(iemOp_addsubpd_Vpd_Wpd)
12898{
12899 IEMOP_MNEMONIC2(RM, ADDSUBPD, addsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12900 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubpd_u128);
12901}
12902
12903
12904/* Opcode 0xf3 0x0f 0xd0 - invalid */
12905
12906
12907/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
12908FNIEMOP_DEF(iemOp_addsubps_Vps_Wps)
12909{
12910 IEMOP_MNEMONIC2(RM, ADDSUBPS, addsubps, Vps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12911 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubps_u128);
12912}
12913
12914
12915
12916/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
12917FNIEMOP_DEF(iemOp_psrlw_Pq_Qq)
12918{
12919 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
12920 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlw_u64);
12921}
12922
12923/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, Wx */
12924FNIEMOP_DEF(iemOp_psrlw_Vx_Wx)
12925{
12926 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12927 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlw_u128);
12928}
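/* Illustrative sketch (not built, name hypothetical) of the per-word logical
   right shift psrlw performs: each 16-bit lane shifts independently by the
   count from the source operand, and a count above 15 clears all lanes. The
   128-bit form does the same over eight lanes. */
#if 0
static void psrlwU64Example(uint64_t *puDst, uint64_t uCount)
{
    if (uCount > 15)
        *puDst = 0;
    else
    {
        /* Shift the whole register, then mask off bits that crossed a lane boundary. */
        uint64_t const fLaneMask = (UINT64_C(0xffff) >> uCount) * UINT64_C(0x0001000100010001);
        *puDst = (*puDst >> uCount) & fLaneMask;
    }
}
#endif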
12929
12930/* Opcode 0xf3 0x0f 0xd1 - invalid */
12931/* Opcode 0xf2 0x0f 0xd1 - invalid */
12932
12933/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
12934FNIEMOP_DEF(iemOp_psrld_Pq_Qq)
12935{
12936 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
12937 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrld_u64);
12938}
12939
12940
12941/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
12942FNIEMOP_DEF(iemOp_psrld_Vx_Wx)
12943{
12944 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12945 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrld_u128);
12946}
12947
12948
12949/* Opcode 0xf3 0x0f 0xd2 - invalid */
12950/* Opcode 0xf2 0x0f 0xd2 - invalid */
12951
12952/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
12953FNIEMOP_DEF(iemOp_psrlq_Pq_Qq)
12954{
12955 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12956 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlq_u64);
12957}
12958
12959
12960/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
12961FNIEMOP_DEF(iemOp_psrlq_Vx_Wx)
12962{
12963 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12964 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlq_u128);
12965}
12966
12967
12968/* Opcode 0xf3 0x0f 0xd3 - invalid */
12969/* Opcode 0xf2 0x0f 0xd3 - invalid */
12970
12971
12972/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
12973FNIEMOP_DEF(iemOp_paddq_Pq_Qq)
12974{
12975 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12976 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full_Sse2, iemAImpl_paddq_u64);
12977}
12978
12979
12980/** Opcode 0x66 0x0f 0xd4 - paddq Vx, Wx */
12981FNIEMOP_DEF(iemOp_paddq_Vx_Wx)
12982{
12983 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12984 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddq_u128);
12985}
12986
12987
12988/* Opcode 0xf3 0x0f 0xd4 - invalid */
12989/* Opcode 0xf2 0x0f 0xd4 - invalid */
12990
12991/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
12992FNIEMOP_DEF(iemOp_pmullw_Pq_Qq)
12993{
12994 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12995 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmullw_u64);
12996}
12997
12998/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
12999FNIEMOP_DEF(iemOp_pmullw_Vx_Wx)
13000{
13001 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13002 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmullw_u128);
13003}
13004
13005
13006/* Opcode 0xf3 0x0f 0xd5 - invalid */
13007/* Opcode 0xf2 0x0f 0xd5 - invalid */
13008
13009/* Opcode 0x0f 0xd6 - invalid */
13010
13011/**
13012 * @opcode 0xd6
13013 * @oppfx 0x66
13014 * @opcpuid sse2
13015 * @opgroup og_sse2_pcksclr_datamove
13016 * @opxcpttype none
13017 * @optest op1=-1 op2=2 -> op1=2
13018 * @optest op1=0 op2=-42 -> op1=-42
13019 */
13020FNIEMOP_DEF(iemOp_movq_Wq_Vq)
13021{
13022 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13023 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13024 if (IEM_IS_MODRM_REG_MODE(bRm))
13025 {
13026 /*
13027 * Register, register.
13028 */
13029 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
13030 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13031 IEM_MC_LOCAL(uint64_t, uSrc);
13032
13033 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13034 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
13035
13036 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
13037 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
13038
13039 IEM_MC_ADVANCE_RIP_AND_FINISH();
13040 IEM_MC_END();
13041 }
13042 else
13043 {
13044 /*
13045 * Memory, register.
13046 */
13047 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
13048 IEM_MC_LOCAL(uint64_t, uSrc);
13049 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13050
13051 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13052 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13053 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13054 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
13055
13056 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
13057 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13058
13059 IEM_MC_ADVANCE_RIP_AND_FINISH();
13060 IEM_MC_END();
13061 }
13062}
13063
13064
13065/**
13066 * @opcode 0xd6
13067 * @opcodesub 11 mr/reg
13068 * @oppfx f3
13069 * @opcpuid sse2
13070 * @opgroup og_sse2_simdint_datamove
13071 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
13072 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
13073 */
13074FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
13075{
13076 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13077 if (IEM_IS_MODRM_REG_MODE(bRm))
13078 {
13079 /*
13080 * Register, register.
13081 */
13082 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13083 IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
13084 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13085 IEM_MC_LOCAL(uint64_t, uSrc);
13086
13087 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13088 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13089 IEM_MC_FPU_TO_MMX_MODE();
13090
13091 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_RM_8(bRm));
13092 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
13093
13094 IEM_MC_ADVANCE_RIP_AND_FINISH();
13095 IEM_MC_END();
13096 }
13097
13098 /**
13099 * @opdone
13100 * @opmnemonic udf30fd6mem
13101 * @opcode 0xd6
13102 * @opcodesub !11 mr/reg
13103 * @oppfx f3
13104 * @opunused intel-modrm
13105 * @opcpuid sse
13106 * @optest ->
13107 */
13108 else
13109 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
13110}
13111
13112
13113/**
13114 * @opcode 0xd6
13115 * @opcodesub 11 mr/reg
13116 * @oppfx f2
13117 * @opcpuid sse2
13118 * @opgroup og_sse2_simdint_datamove
13119 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
13120 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
13121 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
13122 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
13123 * @optest op1=-42 op2=0xfedcba9876543210
13124 * -> op1=0xfedcba9876543210 ftw=0xff
13125 */
13126FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
13127{
13128 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13129 if (IEM_IS_MODRM_REG_MODE(bRm))
13130 {
13131 /*
13132 * Register, register.
13133 */
13134 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13135 IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
13136 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13137 IEM_MC_LOCAL(uint64_t, uSrc);
13138
13139 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13140 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13141 IEM_MC_FPU_TO_MMX_MODE();
13142
13143 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
13144 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), uSrc);
13145
13146 IEM_MC_ADVANCE_RIP_AND_FINISH();
13147 IEM_MC_END();
13148 }
13149
13150 /**
13151 * @opdone
13152 * @opmnemonic udf20fd6mem
13153 * @opcode 0xd6
13154 * @opcodesub !11 mr/reg
13155 * @oppfx f2
13156 * @opunused intel-modrm
13157 * @opcpuid sse
13158 * @optest ->
13159 */
13160 else
13161 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
13162}
13163
13164
13165/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
13166FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
13167{
13168 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13169 /* Docs say register only. */
13170 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
13171 {
13172 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
13173 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Nq, DISOPTYPE_X86_MMX | DISOPTYPE_HARMLESS, 0);
13174 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
13175 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
13176 IEM_MC_ARG(uint64_t *, puDst, 0);
13177 IEM_MC_ARG(uint64_t const *, puSrc, 1);
13178 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
13179 IEM_MC_PREPARE_FPU_USAGE();
13180 IEM_MC_FPU_TO_MMX_MODE();
13181
13182 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
13183 IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
13184 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u64, puDst, puSrc);
13185
13186 IEM_MC_ADVANCE_RIP_AND_FINISH();
13187 IEM_MC_END();
13188 }
13189 else
13190 IEMOP_RAISE_INVALID_OPCODE_RET();
13191}
13192
13193
13194/** Opcode 0x66 0x0f 0xd7 - pmovmskb Gd, Ux */
13195FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
13196{
13197 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13198 /* Docs say register only. */
13199 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
13200 {
13201 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
13202 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Ux, DISOPTYPE_X86_SSE | DISOPTYPE_HARMLESS, 0);
13203 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
13204 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13205 IEM_MC_ARG(uint64_t *, puDst, 0);
13206 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
13207 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13208 IEM_MC_PREPARE_SSE_USAGE();
13209 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
13210 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
13211 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
13212 IEM_MC_ADVANCE_RIP_AND_FINISH();
13213 IEM_MC_END();
13214 }
13215 else
13216 IEMOP_RAISE_INVALID_OPCODE_RET();
13217}
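/* Illustrative sketch (not built, name hypothetical) of the pmovmskb gather:
   the most significant bit of each source byte becomes one bit of the
   destination mask. The 128-bit form above yields 16 mask bits. */
#if 0
static uint64_t pmovmskbU64Example(uint64_t uSrc)
{
    uint64_t fMask = 0;
    for (unsigned iByte = 0; iByte < 8; iByte++)
        fMask |= ((uSrc >> (iByte * 8 + 7)) & 1) << iByte;
    return fMask; /* bits 8..63 of the GREG end up zero for the 8-byte source */
}
#endif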
13218
13219
13220/* Opcode 0xf3 0x0f 0xd7 - invalid */
13221/* Opcode 0xf2 0x0f 0xd7 - invalid */
13222
13223
13224/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
13225FNIEMOP_DEF(iemOp_psubusb_Pq_Qq)
13226{
13227 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13228 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusb_u64);
13229}
13230
13231
13232/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx */
13233FNIEMOP_DEF(iemOp_psubusb_Vx_Wx)
13234{
13235 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13236 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusb_u128);
13237}
13238
13239
13240/* Opcode 0xf3 0x0f 0xd8 - invalid */
13241/* Opcode 0xf2 0x0f 0xd8 - invalid */
13242
13243/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
13244FNIEMOP_DEF(iemOp_psubusw_Pq_Qq)
13245{
13246 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13247 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusw_u64);
13248}
13249
13250
13251/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
13252FNIEMOP_DEF(iemOp_psubusw_Vx_Wx)
13253{
13254 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13255 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusw_u128);
13256}
13257
13258
13259/* Opcode 0xf3 0x0f 0xd9 - invalid */
13260/* Opcode 0xf2 0x0f 0xd9 - invalid */
13261
13262/** Opcode 0x0f 0xda - pminub Pq, Qq */
13263FNIEMOP_DEF(iemOp_pminub_Pq_Qq)
13264{
13265 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13266 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminub_u64);
13267}
13268
13269
13270/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
13271FNIEMOP_DEF(iemOp_pminub_Vx_Wx)
13272{
13273 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13274 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminub_u128);
13275}
13276
13277/* Opcode 0xf3 0x0f 0xda - invalid */
13278/* Opcode 0xf2 0x0f 0xda - invalid */
13279
13280/** Opcode 0x0f 0xdb - pand Pq, Qq */
13281FNIEMOP_DEF(iemOp_pand_Pq_Qq)
13282{
13283 IEMOP_MNEMONIC2(RM, PAND, pand, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13284 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pand_u64);
13285}
13286
13287
13288/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
13289FNIEMOP_DEF(iemOp_pand_Vx_Wx)
13290{
13291 IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13292 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
13293}
13294
13295
13296/* Opcode 0xf3 0x0f 0xdb - invalid */
13297/* Opcode 0xf2 0x0f 0xdb - invalid */
13298
13299/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
13300FNIEMOP_DEF(iemOp_paddusb_Pq_Qq)
13301{
13302 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13303 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusb_u64);
13304}
13305
13306
13307/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
13308FNIEMOP_DEF(iemOp_paddusb_Vx_Wx)
13309{
13310 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13311 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusb_u128);
13312}
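/* Illustrative sketch (not built, name hypothetical) of the unsigned byte
   saturation paddusb applies per lane: sums above 0xff clamp to 0xff instead
   of wrapping; paddusw does the same per word with a 0xffff clamp. */
#if 0
static uint8_t addU8SatExample(uint8_t bLeft, uint8_t bRight)
{
    unsigned const uSum = (unsigned)bLeft + bRight;
    return uSum > 0xff ? UINT8_C(0xff) : (uint8_t)uSum;
}
#endif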
13313
13314
13315/* Opcode 0xf3 0x0f 0xdc - invalid */
13316/* Opcode 0xf2 0x0f 0xdc - invalid */
13317
13318/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
13319FNIEMOP_DEF(iemOp_paddusw_Pq_Qq)
13320{
13321 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13322 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusw_u64);
13323}
13324
13325
13326/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
13327FNIEMOP_DEF(iemOp_paddusw_Vx_Wx)
13328{
13329 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13330 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusw_u128);
13331}
13332
13333
13334/* Opcode 0xf3 0x0f 0xdd - invalid */
13335/* Opcode 0xf2 0x0f 0xdd - invalid */
13336
13337/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
13338FNIEMOP_DEF(iemOp_pmaxub_Pq_Qq)
13339{
13340 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13341 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxub_u64);
13342}
13343
13344
13345/** Opcode 0x66 0x0f 0xde - pmaxub Vx, W */
13346FNIEMOP_DEF(iemOp_pmaxub_Vx_Wx)
13347{
13348 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13349 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxub_u128);
13350}
13351
13352/* Opcode 0xf3 0x0f 0xde - invalid */
13353/* Opcode 0xf2 0x0f 0xde - invalid */
13354
13355
13356/** Opcode 0x0f 0xdf - pandn Pq, Qq */
13357FNIEMOP_DEF(iemOp_pandn_Pq_Qq)
13358{
13359 IEMOP_MNEMONIC2(RM, PANDN, pandn, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13360 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pandn_u64);
13361}
13362
13363
13364/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
13365FNIEMOP_DEF(iemOp_pandn_Vx_Wx)
13366{
13367 IEMOP_MNEMONIC2(RM, PANDN, pandn, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13368 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
13369}
13370
13371
13372/* Opcode 0xf3 0x0f 0xdf - invalid */
13373/* Opcode 0xf2 0x0f 0xdf - invalid */
13374
13375/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
13376FNIEMOP_DEF(iemOp_pavgb_Pq_Qq)
13377{
13378 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13379 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgb_u64);
13380}
13381
13382
13383/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
13384FNIEMOP_DEF(iemOp_pavgb_Vx_Wx)
13385{
13386 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13387 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgb_u128);
13388}
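/* Illustrative sketch (not built, name hypothetical) of the rounding average
   pavgb computes per byte: (a + b + 1) / 2, i.e. ties round up; pavgw is the
   word-sized equivalent. */
#if 0
static uint8_t avgU8Example(uint8_t bLeft, uint8_t bRight)
{
    return (uint8_t)(((unsigned)bLeft + bRight + 1) >> 1);
}
#endif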
13389
13390
13391/* Opcode 0xf3 0x0f 0xe0 - invalid */
13392/* Opcode 0xf2 0x0f 0xe0 - invalid */
13393
13394/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
13395FNIEMOP_DEF(iemOp_psraw_Pq_Qq)
13396{
13397 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13398 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psraw_u64);
13399}
13400
13401
13402/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
13403FNIEMOP_DEF(iemOp_psraw_Vx_Wx)
13404{
13405 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13406 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psraw_u128);
13407}
13408
13409
13410/* Opcode 0xf3 0x0f 0xe1 - invalid */
13411/* Opcode 0xf2 0x0f 0xe1 - invalid */
13412
13413/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
13414FNIEMOP_DEF(iemOp_psrad_Pq_Qq)
13415{
13416 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13417 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrad_u64);
13418}
13419
13420
13421/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
13422FNIEMOP_DEF(iemOp_psrad_Vx_Wx)
13423{
13424 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13425 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrad_u128);
13426}
13427
13428
13429/* Opcode 0xf3 0x0f 0xe2 - invalid */
13430/* Opcode 0xf2 0x0f 0xe2 - invalid */
13431
13432/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
13433FNIEMOP_DEF(iemOp_pavgw_Pq_Qq)
13434{
13435 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13436 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgw_u64);
13437}
13438
13439
13440/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
13441FNIEMOP_DEF(iemOp_pavgw_Vx_Wx)
13442{
13443 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13444 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgw_u128);
13445}
13446
13447
13448/* Opcode 0xf3 0x0f 0xe3 - invalid */
13449/* Opcode 0xf2 0x0f 0xe3 - invalid */
13450
13451/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
13452FNIEMOP_DEF(iemOp_pmulhuw_Pq_Qq)
13453{
13454 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13455 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmulhuw_u64);
13456}
13457
13458
13459/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
13460FNIEMOP_DEF(iemOp_pmulhuw_Vx_Wx)
13461{
13462 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13463 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhuw_u128);
13464}
13465
13466
13467/* Opcode 0xf3 0x0f 0xe4 - invalid */
13468/* Opcode 0xf2 0x0f 0xe4 - invalid */
13469
13470/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
13471FNIEMOP_DEF(iemOp_pmulhw_Pq_Qq)
13472{
13473 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13474 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmulhw_u64);
13475}
13476
13477
13478/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
13479FNIEMOP_DEF(iemOp_pmulhw_Vx_Wx)
13480{
13481 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13482 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmulhw_u128);
13483}
13484
13485
13486/* Opcode 0xf3 0x0f 0xe5 - invalid */
13487/* Opcode 0xf2 0x0f 0xe5 - invalid */
13488/* Opcode 0x0f 0xe6 - invalid */
13489
13490
13491/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
13492FNIEMOP_DEF(iemOp_cvttpd2dq_Vx_Wpd)
13493{
13494 IEMOP_MNEMONIC2(RM, CVTTPD2DQ, cvttpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13495 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttpd2dq_u128);
13496}
13497
13498
13499/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
13500FNIEMOP_DEF(iemOp_cvtdq2pd_Vx_Wpd)
13501{
13502 IEMOP_MNEMONIC2(RM, CVTDQ2PD, cvtdq2pd, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13503 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2pd_u128);
13504}
13505
13506
13507/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
13508FNIEMOP_DEF(iemOp_cvtpd2dq_Vx_Wpd)
13509{
13510 IEMOP_MNEMONIC2(RM, CVTPD2DQ, cvtpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13511 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2dq_u128);
13512}
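
/*
 * Note on the three 0xe6 forms above: cvttpd2dq truncates toward zero,
 * cvtpd2dq rounds according to MXCSR.RC, and cvtdq2pd widens the two low
 * int32s to doubles. Minimal sketch of the per-element difference (host C
 * illustration only, not how the iemAImpl workers are written):
 *
 * @code
 *     int32_t iTrunc = (int32_t)rd;            // cvttpd2dq: chop
 *     int32_t iRound = (int32_t)nearbyint(rd); // cvtpd2dq: honour rounding mode
 *     double  rdWide = (double)i32;            // cvtdq2pd: exact widening
 * @endcode
 */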
13513
13514
13515/**
13516 * @opcode 0xe7
13517 * @opcodesub !11 mr/reg
13518 * @oppfx none
13519 * @opcpuid sse
13520 * @opgroup og_sse1_cachect
13521 * @opxcpttype none
13522 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
13523 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
13524 */
13525FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
13526{
13527 IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13528 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13529 if (IEM_IS_MODRM_MEM_MODE(bRm))
13530 {
13531 /* Register, memory. */
13532 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
13533 IEM_MC_LOCAL(uint64_t, uSrc);
13534 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13535
13536 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13537 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
13538 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
13539 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13540 IEM_MC_FPU_TO_MMX_MODE();
13541
13542 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_REG_8(bRm));
13543 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13544
13545 IEM_MC_ADVANCE_RIP_AND_FINISH();
13546 IEM_MC_END();
13547 }
13548 /**
13549 * @opdone
13550 * @opmnemonic ud0fe7reg
13551 * @opcode 0xe7
13552 * @opcodesub 11 mr/reg
13553 * @oppfx none
13554 * @opunused immediate
13555 * @opcpuid sse
13556 * @optest ->
13557 */
13558 else
13559 IEMOP_RAISE_INVALID_OPCODE_RET();
13560}
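
/*
 * MOVNTQ is a non-temporal (cache bypassing) store hint; since only the
 * architectural result matters here, the block above performs a plain 64-bit
 * store. Typical guest usage, for illustration:
 *
 * @code
 *     movntq  [edi], mm0      ; stream mm0 to memory, minimising cache pollution
 *     sfence                  ; order the weakly-ordered non-temporal store
 * @endcode
 */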
13561
13562/**
13563 * @opcode 0xe7
13564 * @opcodesub !11 mr/reg
13565 * @oppfx 0x66
13566 * @opcpuid sse2
13567 * @opgroup og_sse2_cachect
13568 * @opxcpttype 1
13569 * @optest op1=-1 op2=2 -> op1=2
13570 * @optest op1=0 op2=-42 -> op1=-42
13571 */
13572FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
13573{
13574 IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13575 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13576 if (IEM_IS_MODRM_MEM_MODE(bRm))
13577 {
13578 /* Register, memory. */
13579 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
13580 IEM_MC_LOCAL(RTUINT128U, uSrc);
13581 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13582
13583 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13584 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13585 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13586 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
13587
13588 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
13589 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13590
13591 IEM_MC_ADVANCE_RIP_AND_FINISH();
13592 IEM_MC_END();
13593 }
13594
13595 /**
13596 * @opdone
13597 * @opmnemonic ud660fe7reg
13598 * @opcode 0xe7
13599 * @opcodesub 11 mr/reg
13600 * @oppfx 0x66
13601 * @opunused immediate
13602 * @opcpuid sse2
13603 * @optest ->
13604 */
13605 else
13606 IEMOP_RAISE_INVALID_OPCODE_RET();
13607}
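
/*
 * Unlike MOVNTQ above, MOVNTDQ requires a 16 byte aligned destination; the
 * IEM_MC_STORE_MEM_U128_ALIGN_SSE invocation covers the alignment check, so a
 * misaligned guest address faults instead of storing. Illustration:
 *
 * @code
 *     movntdq [edi], xmm0     ; edi must be 16 byte aligned, else \#GP
 * @endcode
 */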
13608
13609/* Opcode 0xf3 0x0f 0xe7 - invalid */
13610/* Opcode 0xf2 0x0f 0xe7 - invalid */
13611
13612
13613/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
13614FNIEMOP_DEF(iemOp_psubsb_Pq_Qq)
13615{
13616 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13617 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsb_u64);
13618}
13619
13620
13621/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
13622FNIEMOP_DEF(iemOp_psubsb_Vx_Wx)
13623{
13624 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13625 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsb_u128);
13626}
13627
13628
13629/* Opcode 0xf3 0x0f 0xe8 - invalid */
13630/* Opcode 0xf2 0x0f 0xe8 - invalid */
13631
13632/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
13633FNIEMOP_DEF(iemOp_psubsw_Pq_Qq)
13634{
13635 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13636 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsw_u64);
13637}
13638
13639
13640/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
13641FNIEMOP_DEF(iemOp_psubsw_Vx_Wx)
13642{
13643 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13644 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsw_u128);
13645}
13646
13647
13648/* Opcode 0xf3 0x0f 0xe9 - invalid */
13649/* Opcode 0xf2 0x0f 0xe9 - invalid */
13650
13651
13652/** Opcode 0x0f 0xea - pminsw Pq, Qq */
13653FNIEMOP_DEF(iemOp_pminsw_Pq_Qq)
13654{
13655 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13656 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminsw_u64);
13657}
13658
13659
13660/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
13661FNIEMOP_DEF(iemOp_pminsw_Vx_Wx)
13662{
13663 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13664 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminsw_u128);
13665}
13666
13667
13668/* Opcode 0xf3 0x0f 0xea - invalid */
13669/* Opcode 0xf2 0x0f 0xea - invalid */
13670
13671
13672/** Opcode 0x0f 0xeb - por Pq, Qq */
13673FNIEMOP_DEF(iemOp_por_Pq_Qq)
13674{
13675 IEMOP_MNEMONIC2(RM, POR, por, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13676 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_por_u64);
13677}
13678
13679
13680/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
13681FNIEMOP_DEF(iemOp_por_Vx_Wx)
13682{
13683 IEMOP_MNEMONIC2(RM, POR, por, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13684 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
13685}
13686
13687
13688/* Opcode 0xf3 0x0f 0xeb - invalid */
13689/* Opcode 0xf2 0x0f 0xeb - invalid */
13690
13691/** Opcode 0x0f 0xec - paddsb Pq, Qq */
13692FNIEMOP_DEF(iemOp_paddsb_Pq_Qq)
13693{
13694 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13695 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsb_u64);
13696}
13697
13698
13699/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
13700FNIEMOP_DEF(iemOp_paddsb_Vx_Wx)
13701{
13702 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13703 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsb_u128);
13704}
13705
13706
13707/* Opcode 0xf3 0x0f 0xec - invalid */
13708/* Opcode 0xf2 0x0f 0xec - invalid */
13709
13710/** Opcode 0x0f 0xed - paddsw Pq, Qq */
13711FNIEMOP_DEF(iemOp_paddsw_Pq_Qq)
13712{
13713 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13714 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsw_u64);
13715}
13716
13717
13718/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
13719FNIEMOP_DEF(iemOp_paddsw_Vx_Wx)
13720{
13721 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13722 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsw_u128);
13723}
13724
13725
13726/* Opcode 0xf3 0x0f 0xed - invalid */
13727/* Opcode 0xf2 0x0f 0xed - invalid */
13728
13729
13730/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
13731FNIEMOP_DEF(iemOp_pmaxsw_Pq_Qq)
13732{
13733 IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13734 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxsw_u64);
13735}
13736
13737
13738/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
13739FNIEMOP_DEF(iemOp_pmaxsw_Vx_Wx)
13740{
13741 IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13742 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxsw_u128);
13743}
13744
13745
13746/* Opcode 0xf3 0x0f 0xee - invalid */
13747/* Opcode 0xf2 0x0f 0xee - invalid */
13748
13749
13750/** Opcode 0x0f 0xef - pxor Pq, Qq */
13751FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
13752{
13753 IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13754 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pxor_u64);
13755}
13756
13757
13758/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
13759FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
13760{
13761 IEMOP_MNEMONIC2(RM, PXOR, pxor, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13762 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
13763}
13764
13765
13766/* Opcode 0xf3 0x0f 0xef - invalid */
13767/* Opcode 0xf2 0x0f 0xef - invalid */
13768
13769/* Opcode 0x0f 0xf0 - invalid */
13770/* Opcode 0x66 0x0f 0xf0 - invalid */
13771
13772
13773/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
13774FNIEMOP_DEF(iemOp_lddqu_Vx_Mx)
13775{
13776 IEMOP_MNEMONIC2(RM_MEM, LDDQU, lddqu, Vdq_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13777 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13778 if (IEM_IS_MODRM_REG_MODE(bRm))
13779 {
13780 /*
13781 * Register, register - (not implemented, assuming it raises \#UD).
13782 */
13783 IEMOP_RAISE_INVALID_OPCODE_RET();
13784 }
13785 else
13786 {
13787 /*
13788 * Register, memory.
13789 */
13790 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
13791 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
13792 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13793
13794 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13795 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
13796 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13797 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
13798 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13799 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
13800
13801 IEM_MC_ADVANCE_RIP_AND_FINISH();
13802 IEM_MC_END();
13803 }
13804}
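
/*
 * LDDQU (SSE3) exists purely for unaligned 128-bit loads, which is why the
 * code above pairs the unaligned IEM_MC_FETCH_MEM_U128 with \#UD for the
 * register form. Illustration:
 *
 * @code
 *     lddqu   xmm1, [esi+3]   ; legal despite the misaligned address
 * @endcode
 */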
13805
13806
13807/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
13808FNIEMOP_DEF(iemOp_psllw_Pq_Qq)
13809{
13810 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13811 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllw_u64);
13812}
13813
13814
13815/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
13816FNIEMOP_DEF(iemOp_psllw_Vx_Wx)
13817{
13818 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13819 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllw_u128);
13820}
13821
13822
13823/* Opcode 0xf2 0x0f 0xf1 - invalid */
13824
13825/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
13826FNIEMOP_DEF(iemOp_pslld_Pq_Qq)
13827{
13828 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13829 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pslld_u64);
13830}
13831
13832
13833/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
13834FNIEMOP_DEF(iemOp_pslld_Vx_Wx)
13835{
13836 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13837 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pslld_u128);
13838}
13839
13840
13841/* Opcode 0xf2 0x0f 0xf2 - invalid */
13842
13843/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
13844FNIEMOP_DEF(iemOp_psllq_Pq_Qq)
13845{
13846 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13847 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllq_u64);
13848}
13849
13850
13851/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
13852FNIEMOP_DEF(iemOp_psllq_Vx_Wx)
13853{
13854 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13855 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllq_u128);
13856}
13857
13858/* Opcode 0xf2 0x0f 0xf3 - invalid */
13859
13860/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
13861FNIEMOP_DEF(iemOp_pmuludq_Pq_Qq)
13862{
13863 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13864 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmuludq_u64);
13865}
13866
13867
13868/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
13869FNIEMOP_DEF(iemOp_pmuludq_Vx_Wx)
13870{
13871 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13872 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmuludq_u128);
13873}
13874
13875
13876/* Opcode 0xf2 0x0f 0xf4 - invalid */
13877
13878/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
13879FNIEMOP_DEF(iemOp_pmaddwd_Pq_Qq)
13880{
13881 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13882 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmaddwd_u64);
13883}
13884
13885
13886/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
13887FNIEMOP_DEF(iemOp_pmaddwd_Vx_Wx)
13888{
13889 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13890 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaddwd_u128);
13891}
13892
13893/* Opcode 0xf2 0x0f 0xf5 - invalid */
13894
13895/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
13896FNIEMOP_DEF(iemOp_psadbw_Pq_Qq)
13897{
13898 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13899 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_psadbw_u64);
13900}
13901
13902
13903/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
13904FNIEMOP_DEF(iemOp_psadbw_Vx_Wx)
13905{
13906 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13907 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psadbw_u128);
13908}
13909
13910
13911/* Opcode 0xf2 0x0f 0xf6 - invalid */
13912
13913/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
13914FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
13915/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
13916FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
13917/* Opcode 0xf2 0x0f 0xf7 - invalid */
13918
13919
13920/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
13921FNIEMOP_DEF(iemOp_psubb_Pq_Qq)
13922{
13923 IEMOP_MNEMONIC2(RM, PSUBB, psubb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13924 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubb_u64);
13925}
13926
13927
13928/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
13929FNIEMOP_DEF(iemOp_psubb_Vx_Wx)
13930{
13931 IEMOP_MNEMONIC2(RM, PSUBB, psubb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13932 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubb_u128);
13933}
13934
13935
13936/* Opcode 0xf2 0x0f 0xf8 - invalid */
13937
13938
13939/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
13940FNIEMOP_DEF(iemOp_psubw_Pq_Qq)
13941{
13942 IEMOP_MNEMONIC2(RM, PSUBW, psubw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13943 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubw_u64);
13944}
13945
13946
13947/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
13948FNIEMOP_DEF(iemOp_psubw_Vx_Wx)
13949{
13950 IEMOP_MNEMONIC2(RM, PSUBW, psubw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13951 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubw_u128);
13952}
13953
13954
13955/* Opcode 0xf2 0x0f 0xf9 - invalid */
13956
13957
13958/** Opcode 0x0f 0xfa - psubd Pq, Qq */
13959FNIEMOP_DEF(iemOp_psubd_Pq_Qq)
13960{
13961 IEMOP_MNEMONIC2(RM, PSUBD, psubd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13962 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubd_u64);
13963}
13964
13965
13966/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
13967FNIEMOP_DEF(iemOp_psubd_Vx_Wx)
13968{
13969 IEMOP_MNEMONIC2(RM, PSUBD, psubd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13970 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubd_u128);
13971}
13972
13973
13974/* Opcode 0xf2 0x0f 0xfa - invalid */
13975
13976
13977/** Opcode 0x0f 0xfb - psubq Pq, Qq */
13978FNIEMOP_DEF(iemOp_psubq_Pq_Qq)
13979{
13980 IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13981 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full_Sse2, iemAImpl_psubq_u64);
13982}
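
/*
 * Note: PSUBQ on MMX registers was only added with SSE2, which is presumably
 * why the dispatch above uses the ..._Sse2 worker variant (fSse2 rather than
 * fMmx CPUID check) while the neighbouring MMX ops use the plain one.
 */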
13983
13984
13985/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
13986FNIEMOP_DEF(iemOp_psubq_Vx_Wx)
13987{
13988 IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13989 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubq_u128);
13990}
13991
13992
13993/* Opcode 0xf2 0x0f 0xfb - invalid */
13994
13995
13996/** Opcode 0x0f 0xfc - paddb Pq, Qq */
13997FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
13998{
13999 IEMOP_MNEMONIC2(RM, PADDB, paddb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
14000 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddb_u64);
14001}
14002
14003
14004/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
14005FNIEMOP_DEF(iemOp_paddb_Vx_Wx)
14006{
14007 IEMOP_MNEMONIC2(RM, PADDB, paddb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
14008 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddb_u128);
14009}
14010
14011
14012/* Opcode 0xf2 0x0f 0xfc - invalid */
14013
14014
14015/** Opcode 0x0f 0xfd - paddw Pq, Qq */
14016FNIEMOP_DEF(iemOp_paddw_Pq_Qq)
14017{
14018 IEMOP_MNEMONIC2(RM, PADDW, paddw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
14019 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddw_u64);
14020}
14021
14022
14023/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
14024FNIEMOP_DEF(iemOp_paddw_Vx_Wx)
14025{
14026 IEMOP_MNEMONIC2(RM, PADDW, paddw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
14027 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddw_u128);
14028}
14029
14030
14031/* Opcode 0xf2 0x0f 0xfd - invalid */
14032
14033
14034/** Opcode 0x0f 0xfe - paddd Pq, Qq */
14035FNIEMOP_DEF(iemOp_paddd_Pq_Qq)
14036{
14037 IEMOP_MNEMONIC2(RM, PADDD, paddd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
14038 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddd_u64);
14039}
14040
14041
14042/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
14043FNIEMOP_DEF(iemOp_paddd_Vx_Wx)
14044{
14045 IEMOP_MNEMONIC2(RM, PADDD, paddd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
14046 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddd_u128);
14047}
14048
14049
14050/* Opcode 0xf2 0x0f 0xfe - invalid */
14051
14052
14053/** Opcode 0x0f 0xff - UD0 */
14054FNIEMOP_DEF(iemOp_ud0)
14055{
14056 IEMOP_MNEMONIC(ud0, "ud0");
14057 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
14058 {
14059 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
14060 if (IEM_IS_MODRM_MEM_MODE(bRm))
14061 IEM_OPCODE_SKIP_RM_EFF_ADDR_BYTES(bRm);
14062 }
14063 IEMOP_HLP_DONE_DECODING();
14064 IEMOP_RAISE_INVALID_OPCODE_RET();
14065}
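
/*
 * The vendor check above only affects the instruction length: Intel defines
 * UD0 with a ModR/M byte plus effective-address bytes, AMD as the bare two
 * opcode bytes. Byte-level illustration (32-bit code segment assumed):
 *
 * @code
 *     0F FF 05 44 33 22 11    ; Intel: one 7-byte UD0; AMD: 2-byte UD0
 *                             ; (05 44 33 22 11 not consumed on AMD)
 * @endcode
 */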
14066
14067
14068
14069/**
14070 * Two byte opcode map, first byte 0x0f.
14071 *
14072 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
14073 * check if it needs updating as well when making changes.
14074 */
14075const PFNIEMOP g_apfnTwoByteMap[] =
14076{
14077 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
14078 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
14079 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
14080 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
14081 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
14082 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
14083 /* 0x05 */ IEMOP_X4(iemOp_syscall),
14084 /* 0x06 */ IEMOP_X4(iemOp_clts),
14085 /* 0x07 */ IEMOP_X4(iemOp_sysret),
14086 /* 0x08 */ IEMOP_X4(iemOp_invd),
14087 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
14088 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
14089 /* 0x0b */ IEMOP_X4(iemOp_ud2),
14090 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
14091 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
14092 /* 0x0e */ IEMOP_X4(iemOp_femms),
14093 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
14094
14095 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
14096 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
14097 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
14098 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14099 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14100 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14101 /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
14102 /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14103 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
14104 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
14105 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
14106 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
14107 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
14108 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
14109 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
14110 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
14111
14112 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
14113 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
14114 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
14115 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
14116 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
14117 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
14118 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
14119 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
14120 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14121 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14122 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
14123 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14124 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
14125 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
14126 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14127 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14128
14129 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
14130 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
14131 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
14132 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
14133 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
14134 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
14135 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
14136 /* 0x37 */ IEMOP_X4(iemOp_getsec),
14137 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
14138 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
14139 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
14140 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
14141 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
14142 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
14143 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
14144 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
14145
14146 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
14147 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
14148 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
14149 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
14150 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
14151 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
14152 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
14153 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
14154 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
14155 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
14156 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
14157 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
14158 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
14159 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
14160 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
14161 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
14162
14163 /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14164 /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
14165 /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
14166 /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
14167 /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14168 /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14169 /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14170 /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14171 /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
14172 /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
14173 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
14174 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
14175 /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
14176 /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
14177 /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
14178 /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,
14179
14180 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14181 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14182 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14183 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14184 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14185 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14186 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14187 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14188 /* 0x68 */ iemOp_punpckhbw_Pq_Qq, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14189 /* 0x69 */ iemOp_punpckhwd_Pq_Qq, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14190 /* 0x6a */ iemOp_punpckhdq_Pq_Qq, iemOp_punpckhdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14191 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14192 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14193 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14194 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14195 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,
14196
14197 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
14198 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
14199 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
14200 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
14201 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14202 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14203 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14204 /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14205
14206 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14207 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14208 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14209 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14210 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
14211 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
14212 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
14213 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,
14214
14215 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
14216 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
14217 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
14218 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
14219 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
14220 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
14221 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
14222 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
14223 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
14224 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
14225 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
14226 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
14227 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
14228 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
14229 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
14230 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
14231
14232 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
14233 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
14234 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
14235 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
14236 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
14237 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
14238 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
14239 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
14240 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
14241 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
14242 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
14243 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
14244 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
14245 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
14246 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
14247 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
14248
14249 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
14250 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
14251 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
14252 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
14253 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
14254 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
14255 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
14256 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
14257 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
14258 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
14259 /* 0xaa */ IEMOP_X4(iemOp_rsm),
14260 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
14261 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
14262 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
14263 /* 0xae */ IEMOP_X4(iemOp_Grp15),
14264 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
14265
14266 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
14267 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
14268 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
14269 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
14270 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
14271 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
14272 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
14273 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
14274 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
14275 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
14276 /* 0xba */ IEMOP_X4(iemOp_Grp8),
14277 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
14278 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
14279 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
14280 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
14281 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
14282
14283 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
14284 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
14285 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
14286 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14287 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
14288 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
14289 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
14290 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
14291 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
14292 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
14293 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
14294 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
14295 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
14296 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
14297 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
14298 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
14299
14300 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
14301 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14302 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14303 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14304 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14305 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14306 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
14307 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14308 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14309 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14310 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14311 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14312 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14313 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14314 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14315 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14316
14317 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14318 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14319 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14320 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14321 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14322 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14323 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
14324 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14325 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14326 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14327 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14328 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14329 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14330 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14331 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14332 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14333
14334 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
14335 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14336 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14337 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14338 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14339 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14340 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14341 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14342 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14343 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14344 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14345 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14346 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14347 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14348 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14349 /* 0xff */ IEMOP_X4(iemOp_ud0),
14350};
14351AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
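
/*
 * Layout reminder: four handlers per opcode byte, one per mandatory-prefix
 * column (none, 0x66, 0xf3, 0xf2), hence the 1024 entry assertion above.
 * Minimal lookup sketch (the idxPrefix name is an assumption, not a quote
 * from the decoder):
 *
 * @code
 *     PFNIEMOP const pfn = g_apfnTwoByteMap[(uintptr_t)bOpcode * 4 + idxPrefix];
 * @endcode
 */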
14352
14353/** @} */
14354