VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstTwoByte0f.cpp.h @ 101950

Last change on this file since 101950 was 101950, checked in by vboxsync, 15 months ago

VMM/IEM: Eliminated IEM_MC_ASSIGN use from IEMAllInstTwoByte0f.cpp.h, replacing it with IEM_MC_ARG_CONST/IEM_MC_LOCAL_ASSIGN and moved IEM_MC_ARG* and IEM_MC_LOCAL* from the top of the MC-blocks being modified to where they are actually first needed, reducing the life time and troubles for the recompiler. bugref:10371

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 518.4 KB
/* $Id: IEMAllInstTwoByte0f.cpp.h 101950 2023-11-08 01:57:15Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstVexMap1.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/** @name Two byte opcodes (first byte 0x0f).
 *
 * @{
 */

/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 */
FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
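

/*
 * Illustrative example (not part of the original source): a minimal sketch of
 * how an opcode handler further down in this file dispatches to the worker
 * above.  The handler shape is the usual IEM pattern; the exact mnemonic
 * decoration and assembly-helper name are assumptions, not verbatim code:
 *
 * @code
 * FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
 * {
 *     // Hand the MMX register/memory forms to the common worker, passing
 *     // the PADDB assembly helper as the pfnU64 callback.
 *     return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddb_u64);
 * }
 * @endcode
 */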


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 *
 * Unlike iemOpCommonMmx_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
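

/*
 * Illustrative example (not part of the original source): the difference
 * between the two callback shapes used by the workers above.  A sketch of the
 * typedefs as they are believed to look in IEMInternal.h; treat the exact
 * signatures as assumptions:
 *
 * @code
 * // Full variant: gets the FPU/FXSAVE state plus the operands
 * // (invoked via IEM_MC_CALL_MMX_AIMPL_2).
 * typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAF2U64,
 *                            (PCX86FXSTATE pFpuState, uint64_t *puDst, uint64_t const *puSrc));
 * // 'Opt' variant: just the operands (invoked via IEM_MC_CALL_VOID_AIMPL_2).
 * typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAOPTF2U64,
 *                            (uint64_t *puDst, uint64_t const *puSrc));
 * @endcode
 */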


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE.
 *
 * Unlike iemOpCommonMmxSse_FullFull_To_Full, the @a pfnU64 worker function takes
 * no FXSAVE state, just the operands.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSseOpt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxx    mm1, mm2/mem64
 * for instructions introduced with SSE2.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full_Sse2, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * SSE cpuid checks. No SIMD FP exceptions.
 *
 * @sa iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
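

/*
 * Illustrative example (not part of the original source): the dispatch
 * pattern used by the opcode handlers that feed the SSE worker above.
 * 'pxxx' is a placeholder, not a real instruction:
 *
 * @code
 * FNIEMOP_DEF(iemOp_pxxx_Vx_Wx)
 * {
 *     // The worker does the ModR/M decode, #UD/#NM checks and the 16-byte
 *     // alignment check; the handler only supplies the arithmetic helper.
 *     return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pxxx_u128);
 * }
 * @endcode
 */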


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 cpuid checks.
 *
 * Unlike iemOpCommonSse2_FullFull_To_Full, the @a pfnU128 worker function takes
 * no FXSAVE state, just the operands.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, pDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, pSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxxx   mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem32].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U32_ZX_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
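

/*
 * Illustrative example (not part of the original source): sketch of a
 * low-half unpack handler feeding the worker above.  Handler and helper
 * names follow the usual IEM convention and are assumptions:
 *
 * @code
 * FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
 * {
 *     // Qd: only the low 32 bits are read in the memory case, matching
 *     // IEM_MC_FETCH_MEM_U32_ZX_U64 in the worker.
 *     return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
 * }
 * @endcode
 */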


/**
 * Common worker for SSE instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned access that may read either 64 or 128 bits for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 128-bit aligned access that may read either 64 or 128 bits for SSE.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_LowLow_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the low qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the high
         *        part or skip any associated \#PF. Ditto for segmentation \#GPs. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for MMX instructions of the form:
 *      pxxxx   mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_ARG(uint64_t const *, puSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, puDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Intel documents this as a full 64-bit read. */

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(puDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU64, puDst, puSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
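

/*
 * Illustrative example (not part of the original source): the corresponding
 * high-half unpack dispatch for the worker above (names are assumptions):
 *
 * @code
 * FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq)
 * {
 *     // Qq: unlike the low-half variant, a full 64-bit memory read is done.
 *     return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
 * }
 * @endcode
 */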


/**
 * Common worker for SSE instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE means a
 * 128-bit aligned access that may read the full 128 bits or only the upper
 * 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE instructions of the form:
 *      pxxs    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
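

/*
 * Illustrative example (not part of the original source): sketch of a packed
 * single-precision arithmetic handler feeding the worker above (names are
 * the conventional ones and should be treated as assumptions):
 *
 * @code
 * FNIEMOP_DEF(iemOp_addps_Vps_Wps)
 * {
 *     // The worker stores the IEMSSERESULT back to xmm1 and raises #XM/#UD
 *     // for unmasked SIMD FP exceptions after the helper call.
 *     return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
 * }
 * @endcode
 */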


/**
 * Common worker for SSE instructions of the form:
 *      pxxs    xmm1, xmm2/mem32
 *
 * No alignment check is performed on the 32-bit memory operand (scalar
 * access). Exceptions type 3. SSE cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSseFp_FullR32_To_Full, PFNIEMAIMPLFPSSEF2U128R32, pfnU128_R32)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM32.
         */
        IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT32U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R32_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem32].
         */
        IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(RTFLOAT32U, r32Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src2, r32Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R32(r32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R32, pSseRes, pSrc1, pr32Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
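

/*
 * Illustrative example (not part of the original source): scalar dispatch
 * using the R32 worker above (the helper-name suffix is an assumption):
 *
 * @code
 * FNIEMOP_DEF(iemOp_addss_Vss_Wss)
 * {
 *     // Scalar form: only 32 bits are read from memory, no alignment check.
 *     return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_addss_u128_r32);
 * }
 * @endcode
 */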


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxd    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSseFp_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM128.
         */
        IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem128].
         */
        IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxs    xmm1, xmm2/mem64
 *
 * No alignment check is performed on the 64-bit memory operand (scalar
 * access). Exceptions type 3. SSE2 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse2Fp_FullR64_To_Full, PFNIEMAIMPLFPSSEF2U128R64, pfnU128_R64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCRTFLOAT64U, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_R64_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem64].
         */
        IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(RTFLOAT64U, r64Src2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src2, r64Src2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_R64(r64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128_R64, pSseRes, pSrc1, pr64Src2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
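

/*
 * Illustrative example (not part of the original source): the scalar-double
 * counterpart using the R64 worker above (names are assumptions):
 *
 * @code
 * FNIEMOP_DEF(iemOp_addsd_Vsd_Wsd)
 * {
 *     // Scalar form: only 64 bits are read from memory, no alignment check.
 *     return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_addsd_u128_r64);
 * }
 * @endcode
 */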


/**
 * Common worker for SSE2 instructions of the form:
 *      pxxxx   xmm1, xmm2/mem128
 *
 * The 2nd operand is the second half of a register, which for SSE means a
 * 128-bit aligned access that may read the full 128 bits or only the upper
 * 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonSse2_HighHigh_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(2, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        /** @todo Most CPUs probably only read the high qword. We read everything to
         *        make sure we apply segmentation and alignment checks correctly.
         *        When we have time, it would be interesting to explore what real
         *        CPUs actually do and whether they will do a TLB load for the lower
         *        part or skip any associated \#PF. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
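

/*
 * Illustrative example (not part of the original source): sketch of a
 * high-half SSE2 unpack feeding the worker above (names are assumptions):
 *
 * @code
 * FNIEMOP_DEF(iemOp_punpckhqdq_Vx_Wx)
 * {
 *     return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhqdq_u128);
 * }
 * @endcode
 */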


/**
 * Common worker for SSE3 instructions of the form:
 *      hxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 2. SSE3 cpuid checks.
 *
 * @sa iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse3Fp_FullFull_To_Full, PFNIEMAIMPLFPSSEF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG(PCX86XMMREG, pSrc2, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_XMM_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128].
         */
        IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(IEMSSERESULT, SseRes);
        IEM_MC_LOCAL(X86XMMREG, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PIEMSSERESULT, pSseRes, SseRes, 0);
        IEM_MC_ARG(PCX86XMMREG, pSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc2, uSrc2, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_XMM_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_3(pfnU128, pSseRes, pSrc1, pSrc2);
        IEM_MC_STORE_SSE_RESULT(SseRes, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
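

/*
 * Illustrative example (not part of the original source): sketch of an SSE3
 * horizontal-add handler feeding the worker above (names are assumptions):
 *
 * @code
 * FNIEMOP_DEF(iemOp_haddps_Vps_Wps)
 * {
 *     return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddps_u128);
 * }
 * @endcode
 */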


/** Opcode 0x0f 0x00 /0. */
FNIEMOPRM_DEF(iemOp_Grp6_sldt)
{
    IEMOP_MNEMONIC(sldt, "sldt Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_sldt_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_sldt_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}


/** Opcode 0x0f 0x00 /1. */
FNIEMOPRM_DEF(iemOp_Grp6_str)
{
    IEMOP_MNEMONIC(str, "str Rv/Mw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_str_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
    }

    /* Ignore operand size here, memory refs are always 16-bit. */
    IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_str_mem, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}


/** Opcode 0x0f 0x00 /2. */
FNIEMOPRM_DEF(iemOp_Grp6_lldt)
{
    IEMOP_MNEMONIC(lldt, "lldt Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_286, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1, IEM_MC_F_MIN_286, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x00 /3. */
FNIEMOPRM_DEF(iemOp_Grp6_ltr)
{
    IEMOP_MNEMONIC(ltr, "ltr Ew");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_286, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1, IEM_MC_F_MIN_286, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x00 /4 and /5. */
FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
{
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_286, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(2, 1, IEM_MC_F_MIN_286, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_VerX, u16Sel, fWriteArg);
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x00 /4. */
FNIEMOPRM_DEF(iemOp_Grp6_verr)
{
    IEMOP_MNEMONIC(verr, "verr Ew");
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}


/** Opcode 0x0f 0x00 /5. */
FNIEMOPRM_DEF(iemOp_Grp6_verw)
{
    IEMOP_MNEMONIC(verw, "verw Ew");
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
}


/**
 * Group 6 jump table.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] =
{
    iemOp_Grp6_sldt,
    iemOp_Grp6_str,
    iemOp_Grp6_lldt,
    iemOp_Grp6_ltr,
    iemOp_Grp6_verr,
    iemOp_Grp6_verw,
    iemOp_InvalidWithRM,
    iemOp_InvalidWithRM
};

/** Opcode 0x0f 0x00. */
FNIEMOP_DEF(iemOp_Grp6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    return FNIEMOP_CALL_1(g_apfnGroup6[IEM_GET_MODRM_REG_8(bRm)], bRm);
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sgdt, "sgdt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1, IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_sgdt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_MNEMONIC(vmcall, "vmcall");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the VMX instructions. ASSUMING no lock for now. */

    /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
             want all hypercalls regardless of instruction used, and if a
             hypercall isn't handled by GIM or HMSvm, an #UD will be raised.
             (NEM/win makes ASSUMPTIONS about this behavior.) */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, iemCImpl_vmcall);
}


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_MNEMONIC(vmlaunch, "vmlaunch");
    IEMOP_HLP_IN_VMX_OPERATION("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
                                iemCImpl_vmlaunch);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_MNEMONIC(vmresume, "vmresume");
    IEMOP_HLP_IN_VMX_OPERATION("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
                                iemCImpl_vmresume);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /0. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_MNEMONIC(vmxoff, "vmxoff");
    IEMOP_HLP_IN_VMX_OPERATION("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_VMX_INSTR("vmxoff", kVmxVDiag_Vmxoff);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_vmxoff);
}
#else
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#endif


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sidt, "sidt Ms");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(2, 1, IEM_MC_F_MIN_286, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_sidt, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC(monitor, "monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_monitor, pVCpu->iem.s.iEffSeg);
}


/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, iemCImpl_mwait);
}


/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC(lgdt, "lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT, iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
}


/** Opcode 0x0f 0x01 0xd0. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC(xgetbv, "xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
/** @todo testcase: test prefixes and exceptions. currently not checking for the
 *        OPSIZE one ... */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_xgetbv);
    }
    IEMOP_RAISE_INVALID_OPCODE_RET();
}


/** Opcode 0x0f 0x01 0xd1. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC(xsetbv, "xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
    {
        /** @todo r=ramshankar: We should use
         *        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX and
         *        IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES here. */
/** @todo testcase: test prefixes and exceptions. currently not checking for the
 *        OPSIZE one ... */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_xsetbv);
    }
    IEMOP_RAISE_INVALID_OPCODE_RET();
}

1549
1550/** Opcode 0x0f 0x01 /3. */
1551FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
1552{
1553 IEMOP_MNEMONIC(lidt, "lidt");
1554 IEMMODE enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : pVCpu->iem.s.enmEffOpSize;
1555 IEM_MC_BEGIN(3, 1, 0, 0);
1556 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
1557 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1558 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1559 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
1560 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg, /*=*/ enmEffOpSize, 2);
1561 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT, iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
1562 IEM_MC_END();
1563}
1564
1565
1566/** Opcode 0x0f 0x01 0xd8. */
1567#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1568FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun)
1569{
1570 IEMOP_MNEMONIC(vmrun, "vmrun");
1571 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1572 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
1573 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
1574 iemCImpl_vmrun);
1575}
1576#else
1577FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);
1578#endif
1579
1580/** Opcode 0x0f 0x01 0xd9. */
1581FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall)
1582{
1583 IEMOP_MNEMONIC(vmmcall, "vmmcall");
1584 /** @todo r=bird: Table A-8 on page 524 in vol 3 has VMGEXIT for this
1585 * opcode sequence when F3 or F2 is used as prefix. So, the assumtion
1586 * here cannot be right... */
1587 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1588
1589 /* Note! We do not check any CPUMFEATURES::fSvm here as we (GIM) generally
1590 want all hypercalls regardless of instruction used, and if a
1591 hypercall isn't handled by GIM or HMSvm will raise an #UD.
1592 (NEM/win makes ASSUMPTIONS about this behavior.) */
1593 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_vmmcall);
1594}
1595
1596/** Opcode 0x0f 0x01 0xda. */
1597#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1598FNIEMOP_DEF(iemOp_Grp7_Amd_vmload)
1599{
1600 IEMOP_MNEMONIC(vmload, "vmload");
1601 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1602 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_vmload);
1603}
1604#else
1605FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);
1606#endif
1607
1608
1609/** Opcode 0x0f 0x01 0xdb. */
1610#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1611FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave)
1612{
1613 IEMOP_MNEMONIC(vmsave, "vmsave");
1614 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1615 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_vmsave);
1616}
1617#else
1618FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);
1619#endif
1620
1621
1622/** Opcode 0x0f 0x01 0xdc. */
1623#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1624FNIEMOP_DEF(iemOp_Grp7_Amd_stgi)
1625{
1626 IEMOP_MNEMONIC(stgi, "stgi");
1627 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1628 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_stgi);
1629}
1630#else
1631FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);
1632#endif
1633
1634
1635/** Opcode 0x0f 0x01 0xdd. */
1636#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1637FNIEMOP_DEF(iemOp_Grp7_Amd_clgi)
1638{
1639 IEMOP_MNEMONIC(clgi, "clgi");
1640 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1641 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_clgi);
1642}
1643#else
1644FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);
1645#endif
1646
1647
1648/** Opcode 0x0f 0x01 0xdf. */
1649#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1650FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga)
1651{
1652 IEMOP_MNEMONIC(invlpga, "invlpga");
1653 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1654 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_invlpga);
1655}
1656#else
1657FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
1658#endif
1659
1660
1661/** Opcode 0x0f 0x01 0xde. */
1662#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
1663FNIEMOP_DEF(iemOp_Grp7_Amd_skinit)
1664{
1665 IEMOP_MNEMONIC(skinit, "skinit");
1666 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
1667 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_skinit);
1668}
1669#else
1670FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);
1671#endif
1672
1673
1674/** Opcode 0x0f 0x01 /4. */
1675FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
1676{
1677 IEMOP_MNEMONIC(smsw, "smsw");
1678 IEMOP_HLP_MIN_286();
1679 if (IEM_IS_MODRM_REG_MODE(bRm))
1680 {
1681 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1682 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_smsw_reg, IEM_GET_MODRM_RM(pVCpu, bRm), pVCpu->iem.s.enmEffOpSize);
1683 }
1684
1685 /* Ignore operand size here, memory refs are always 16-bit. */
1686 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_286, 0);
1687 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
1688 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1689 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1690 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
1691 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_smsw_mem, iEffSeg, GCPtrEffDst);
1692 IEM_MC_END();
1693}
1694
1695
1696/** Opcode 0x0f 0x01 /6. */
1697FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
1698{
1699 /* The operand size is effectively ignored; everything is 16-bit and only
1700 the lower 4 bits (CR0.PE/MP/EM/TS) are used. */
1701 IEMOP_MNEMONIC(lmsw, "lmsw");
1702 IEMOP_HLP_MIN_286();
1703 if (IEM_IS_MODRM_REG_MODE(bRm))
1704 {
1705 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_286, 0);
1706 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1707 IEM_MC_ARG(uint16_t, u16Tmp, 0);
1708 IEM_MC_ARG_CONST(RTGCPTR, GCPtrEffDst, NIL_RTGCPTR, 1);
1709 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
1710 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
1711 IEM_MC_END();
1712 }
1713 else
1714 {
1715 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_286, 0);
1716 IEM_MC_ARG(uint16_t, u16Tmp, 0);
1717 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
1718 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1719 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1720 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
1721 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT, iemCImpl_lmsw, u16Tmp, GCPtrEffDst);
1722 IEM_MC_END();
1723 }
1724}
1725
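/* Background note: unlike a full "mov cr0, reg", lmsw can set but never
   clear CR0.PE and leaves the upper CR0 bits untouched; hence the dedicated
   iemCImpl_lmsw worker rather than sharing the mov-to-CR0 path. */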
1726
1727/** Opcode 0x0f 0x01 /7. */
1728FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
1729{
1730 IEMOP_MNEMONIC(invlpg, "invlpg");
1731 IEMOP_HLP_MIN_486();
1732 IEM_MC_BEGIN(1, 1, IEM_MC_F_MIN_386, 0);
1733 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
1734 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1735 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1736 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_VMEXIT, iemCImpl_invlpg, GCPtrEffDst);
1737 IEM_MC_END();
1738}
1739
1740
1741/** Opcode 0x0f 0x01 0xf8. */
1742FNIEMOP_DEF(iemOp_Grp7_swapgs)
1743{
1744 IEMOP_MNEMONIC(swapgs, "swapgs");
1745 IEMOP_HLP_ONLY_64BIT();
1746 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1747 IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_swapgs);
1748}
1749
1750
1751/** Opcode 0x0f 0x01 0xf9. */
1752FNIEMOP_DEF(iemOp_Grp7_rdtscp)
1753{
1754 IEMOP_MNEMONIC(rdtscp, "rdtscp");
1755 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1756 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_rdtscp);
1757}
1758
1759
1760/**
1761 * Group 7 jump table, memory variant.
1762 */
1763IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] =
1764{
1765 iemOp_Grp7_sgdt,
1766 iemOp_Grp7_sidt,
1767 iemOp_Grp7_lgdt,
1768 iemOp_Grp7_lidt,
1769 iemOp_Grp7_smsw,
1770 iemOp_InvalidWithRM,
1771 iemOp_Grp7_lmsw,
1772 iemOp_Grp7_invlpg
1773};
1774
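/* Decoding illustration (a sketch, not code): for 0F 01 the memory forms
   dispatch on the reg field alone via g_apfnGroup7Mem above, while the
   register forms also decode the rm field, e.g. ModR/M 0xd8
   (11 011 000b: reg=3, rm=0) selects VMRUN and 0xf8 (reg=7, rm=0) selects
   SWAPGS. */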
1775
1776/** Opcode 0x0f 0x01. */
1777FNIEMOP_DEF(iemOp_Grp7)
1778{
1779 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1780 if (IEM_IS_MODRM_MEM_MODE(bRm))
1781 return FNIEMOP_CALL_1(g_apfnGroup7Mem[IEM_GET_MODRM_REG_8(bRm)], bRm);
1782
1783 switch (IEM_GET_MODRM_REG_8(bRm))
1784 {
1785 case 0:
1786 switch (IEM_GET_MODRM_RM_8(bRm))
1787 {
1788 case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
1789 case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
1790 case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
1791 case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
1792 }
1793 IEMOP_RAISE_INVALID_OPCODE_RET();
1794
1795 case 1:
1796 switch (IEM_GET_MODRM_RM_8(bRm))
1797 {
1798 case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
1799 case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
1800 }
1801 IEMOP_RAISE_INVALID_OPCODE_RET();
1802
1803 case 2:
1804 switch (IEM_GET_MODRM_RM_8(bRm))
1805 {
1806 case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
1807 case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
1808 }
1809 IEMOP_RAISE_INVALID_OPCODE_RET();
1810
1811 case 3:
1812 switch (IEM_GET_MODRM_RM_8(bRm))
1813 {
1814 case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
1815 case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
1816 case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
1817 case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
1818 case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
1819 case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
1820 case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
1821 case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
1822 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1823 }
1824
1825 case 4:
1826 return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);
1827
1828 case 5:
1829 IEMOP_RAISE_INVALID_OPCODE_RET();
1830
1831 case 6:
1832 return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);
1833
1834 case 7:
1835 switch (IEM_GET_MODRM_RM_8(bRm))
1836 {
1837 case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
1838 case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
1839 }
1840 IEMOP_RAISE_INVALID_OPCODE_RET();
1841
1842 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1843 }
1844}
1845
1846/** Opcode 0x0f 0x00 /3. */
1847FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
1848{
1849 IEMOP_HLP_NO_REAL_OR_V86_MODE();
1850 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1851
1852 if (IEM_IS_MODRM_REG_MODE(bRm))
1853 {
1854 switch (pVCpu->iem.s.enmEffOpSize)
1855 {
1856 case IEMMODE_16BIT:
1857 IEM_MC_BEGIN(3, 0, 0, 0);
1858 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1859 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1860 IEM_MC_ARG(uint16_t, u16Sel, 1);
1861 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1862
1863 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1864 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1865 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1866
1867 IEM_MC_END();
1868 break;
1869
1870 case IEMMODE_32BIT:
1871 case IEMMODE_64BIT:
1872 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
1873 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1874 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1875 IEM_MC_ARG(uint16_t, u16Sel, 1);
1876 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1877
1878 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1879 IEM_MC_FETCH_GREG_U16(u16Sel, IEM_GET_MODRM_RM(pVCpu, bRm));
1880 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1881
1882 IEM_MC_END();
1883 break;
1884
1885 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1886 }
1887 }
1888 else
1889 {
1890 switch (pVCpu->iem.s.enmEffOpSize)
1891 {
1892 case IEMMODE_16BIT:
1893 IEM_MC_BEGIN(3, 1, 0, 0);
1894 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1895 IEM_MC_ARG(uint16_t, u16Sel, 1);
1896 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1897 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1898
1899 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1900 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1901
1902 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1903 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1904 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg);
1905
1906 IEM_MC_END();
1907 break;
1908
1909 case IEMMODE_32BIT:
1910 case IEMMODE_64BIT:
1911 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
1912 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
1913 IEM_MC_ARG(uint16_t, u16Sel, 1);
1914 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2);
1915 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1916
1917 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1918 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
1919/** @todo testcase: make sure it's a 16-bit read. */
1920
1921 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1922 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
1923 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg);
1924
1925 IEM_MC_END();
1926 break;
1927
1928 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1929 }
1930 }
1931}
1932
1933
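/* Note on lar/lsl: both set ZF on a successful load and clear it when the
   selector fails its checks (hence IEM_CIMPL_F_STATUS_FLAGS in the common
   worker above); they differ only in returning the access rights (lar) or
   the segment limit (lsl), so a single worker with an fIsLar flag covers
   both. */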
1934
1935/** Opcode 0x0f 0x02. */
1936FNIEMOP_DEF(iemOp_lar_Gv_Ew)
1937{
1938 IEMOP_MNEMONIC(lar, "lar Gv,Ew");
1939 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
1940}
1941
1942
1943/** Opcode 0x0f 0x03. */
1944FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
1945{
1946 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew");
1947 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
1948}
1949
1950
1951/** Opcode 0x0f 0x05. */
1952FNIEMOP_DEF(iemOp_syscall)
1953{
1954 IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
1955 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1956 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
1957 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB,
1958 iemCImpl_syscall);
1959}
1960
1961
1962/** Opcode 0x0f 0x06. */
1963FNIEMOP_DEF(iemOp_clts)
1964{
1965 IEMOP_MNEMONIC(clts, "clts");
1966 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1967 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_clts);
1968}
1969
1970
1971/** Opcode 0x0f 0x07. */
1972FNIEMOP_DEF(iemOp_sysret)
1973{
1974 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
1975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1976 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
1977 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB,
1978 iemCImpl_sysret, pVCpu->iem.s.enmEffOpSize);
1979}
1980
1981
1982/** Opcode 0x0f 0x08. */
1983FNIEMOP_DEF(iemOp_invd)
1984{
1985 IEMOP_MNEMONIC0(FIXED, INVD, invd, DISOPTYPE_PRIVILEGED, 0);
1986 IEMOP_HLP_MIN_486();
1987 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1988 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_invd);
1989}
1990
1991
1992/** Opcode 0x0f 0x09. */
1993FNIEMOP_DEF(iemOp_wbinvd)
1994{
1995 IEMOP_MNEMONIC0(FIXED, WBINVD, wbinvd, DISOPTYPE_PRIVILEGED, 0);
1996 IEMOP_HLP_MIN_486();
1997 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1998 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_wbinvd);
1999}
2000
2001
2002/** Opcode 0x0f 0x0b. */
2003FNIEMOP_DEF(iemOp_ud2)
2004{
2005 IEMOP_MNEMONIC(ud2, "ud2");
2006 IEMOP_RAISE_INVALID_OPCODE_RET();
2007}
2008
2009/** Opcode 0x0f 0x0d. */
2010FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
2011{
2012 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
2013 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLongMode && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch)
2014 {
2015 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP");
2016 IEMOP_RAISE_INVALID_OPCODE_RET();
2017 }
2018
2019 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2020 if (IEM_IS_MODRM_REG_MODE(bRm))
2021 {
2022 IEMOP_MNEMONIC(GrpPInvalid, "GrpP");
2023 IEMOP_RAISE_INVALID_OPCODE_RET();
2024 }
2025
2026 switch (IEM_GET_MODRM_REG_8(bRm))
2027 {
2028 case 2: /* Aliased to /0 for the time being. */
2029 case 4: /* Aliased to /0 for the time being. */
2030 case 5: /* Aliased to /0 for the time being. */
2031 case 6: /* Aliased to /0 for the time being. */
2032 case 7: /* Aliased to /0 for the time being. */
2033 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break;
2034 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break;
2035 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break;
2036 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2037 }
2038
2039 IEM_MC_BEGIN(0, 1, 0, 0);
2040 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2041 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2042 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2043 /* Currently a NOP. */
2044 NOREF(GCPtrEffSrc);
2045 IEM_MC_ADVANCE_RIP_AND_FINISH();
2046 IEM_MC_END();
2047}
2048
2049
2050/** Opcode 0x0f 0x0e. */
2051FNIEMOP_DEF(iemOp_femms)
2052{
2053 IEMOP_MNEMONIC(femms, "femms");
2054
2055 IEM_MC_BEGIN(0, 0, 0, 0);
2056 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2057 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
2058 IEM_MC_MAYBE_RAISE_FPU_XCPT();
2059 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
2060 IEM_MC_FPU_FROM_MMX_MODE();
2061 IEM_MC_ADVANCE_RIP_AND_FINISH();
2062 IEM_MC_END();
2063}
2064
2065
2066/** Opcode 0x0f 0x0f. */
2067FNIEMOP_DEF(iemOp_3Dnow)
2068{
2069 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow)
2070 {
2071 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow");
2072 IEMOP_RAISE_INVALID_OPCODE_RET();
2073 }
2074
2075#ifdef IEM_WITH_3DNOW
2076 /* This is pretty sparse, use switch instead of table. */
2077 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2078 return FNIEMOP_CALL_1(iemOp_3DNowDispatcher, b);
2079#else
2080 IEMOP_BITCH_ABOUT_STUB();
2081 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2082#endif
2083}
2084
2085
2086/**
2087 * @opcode 0x10
2088 * @oppfx none
2089 * @opcpuid sse
2090 * @opgroup og_sse_simdfp_datamove
2091 * @opxcpttype 4UA
2092 * @optest op1=1 op2=2 -> op1=2
2093 * @optest op1=0 op2=-22 -> op1=-22
2094 */
2095FNIEMOP_DEF(iemOp_movups_Vps_Wps)
2096{
2097 IEMOP_MNEMONIC2(RM, MOVUPS, movups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2098 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2099 if (IEM_IS_MODRM_REG_MODE(bRm))
2100 {
2101 /*
2102 * XMM128, XMM128.
2103 */
2104 IEM_MC_BEGIN(0, 0, 0, 0);
2105 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2106 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2107 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2108 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2109 IEM_GET_MODRM_RM(pVCpu, bRm));
2110 IEM_MC_ADVANCE_RIP_AND_FINISH();
2111 IEM_MC_END();
2112 }
2113 else
2114 {
2115 /*
2116 * XMM128, [mem128].
2117 */
2118 IEM_MC_BEGIN(0, 2, 0, 0);
2119 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2120 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2121
2122 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2123 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2124 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2125 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2126
2127 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2128 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2129
2130 IEM_MC_ADVANCE_RIP_AND_FINISH();
2131 IEM_MC_END();
2132 }
2134}
2135
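/* Alignment note: the unaligned movups/movupd forms use
   IEM_MC_FETCH_MEM_U128 / IEM_MC_STORE_MEM_U128, which do not enforce
   16-byte alignment, whereas the aligned movaps/movapd forms further down
   use the ..._ALIGN_SSE variants and raise #GP on a misaligned operand. */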
2136
2137/**
2138 * @opcode 0x10
2139 * @oppfx 0x66
2140 * @opcpuid sse2
2141 * @opgroup og_sse2_pcksclr_datamove
2142 * @opxcpttype 4UA
2143 * @optest op1=1 op2=2 -> op1=2
2144 * @optest op1=0 op2=-42 -> op1=-42
2145 */
2146FNIEMOP_DEF(iemOp_movupd_Vpd_Wpd)
2147{
2148 IEMOP_MNEMONIC2(RM, MOVUPD, movupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2149 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2150 if (IEM_IS_MODRM_REG_MODE(bRm))
2151 {
2152 /*
2153 * XMM128, XMM128.
2154 */
2155 IEM_MC_BEGIN(0, 0, 0, 0);
2156 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2157 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2158 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2159 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
2160 IEM_GET_MODRM_RM(pVCpu, bRm));
2161 IEM_MC_ADVANCE_RIP_AND_FINISH();
2162 IEM_MC_END();
2163 }
2164 else
2165 {
2166 /*
2167 * XMM128, [mem128].
2168 */
2169 IEM_MC_BEGIN(0, 2, 0, 0);
2170 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2171 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2172
2173 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2174 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2175 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2176 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2177
2178 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2179 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2180
2181 IEM_MC_ADVANCE_RIP_AND_FINISH();
2182 IEM_MC_END();
2183 }
2184}
2185
2186
2187/**
2188 * @opcode 0x10
2189 * @oppfx 0xf3
2190 * @opcpuid sse
2191 * @opgroup og_sse_simdfp_datamove
2192 * @opxcpttype 5
2193 * @optest op1=1 op2=2 -> op1=2
2194 * @optest op1=0 op2=-22 -> op1=-22
2195 */
2196FNIEMOP_DEF(iemOp_movss_Vss_Wss)
2197{
2198 IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZx_WO, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2199 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2200 if (IEM_IS_MODRM_REG_MODE(bRm))
2201 {
2202 /*
2203 * XMM32, XMM32.
2204 */
2205 IEM_MC_BEGIN(0, 1, 0, 0);
2206 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2207 IEM_MC_LOCAL(uint32_t, uSrc);
2208
2209 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2210 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2211 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/ );
2212 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, uSrc);
2213
2214 IEM_MC_ADVANCE_RIP_AND_FINISH();
2215 IEM_MC_END();
2216 }
2217 else
2218 {
2219 /*
2220 * XMM128, [mem32].
2221 */
2222 IEM_MC_BEGIN(0, 2, 0, 0);
2223 IEM_MC_LOCAL(uint32_t, uSrc);
2224 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2225
2226 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2227 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2228 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2229 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2230
2231 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2232 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2233
2234 IEM_MC_ADVANCE_RIP_AND_FINISH();
2235 IEM_MC_END();
2236 }
2237}
2238
2239
2240/**
2241 * @opcode 0x10
2242 * @oppfx 0xf2
2243 * @opcpuid sse2
2244 * @opgroup og_sse2_pcksclr_datamove
2245 * @opxcpttype 5
2246 * @optest op1=1 op2=2 -> op1=2
2247 * @optest op1=0 op2=-42 -> op1=-42
2248 */
2249FNIEMOP_DEF(iemOp_movsd_Vsd_Wsd)
2250{
2251 IEMOP_MNEMONIC2(RM, MOVSD, movsd, VsdZx_WO, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2252 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2253 if (IEM_IS_MODRM_REG_MODE(bRm))
2254 {
2255 /*
2256 * XMM64, XMM64.
2257 */
2258 IEM_MC_BEGIN(0, 1, 0, 0);
2259 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2260 IEM_MC_LOCAL(uint64_t, uSrc);
2261
2262 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2263 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2264 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2265 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2266
2267 IEM_MC_ADVANCE_RIP_AND_FINISH();
2268 IEM_MC_END();
2269 }
2270 else
2271 {
2272 /*
2273 * XMM128, [mem64].
2274 */
2275 IEM_MC_BEGIN(0, 2, 0, 0);
2276 IEM_MC_LOCAL(uint64_t, uSrc);
2277 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2278
2279 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2280 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2281 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2282 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2283
2284 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2285 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2286
2287 IEM_MC_ADVANCE_RIP_AND_FINISH();
2288 IEM_MC_END();
2289 }
2290}
2291
2292
2293/**
2294 * @opcode 0x11
2295 * @oppfx none
2296 * @opcpuid sse
2297 * @opgroup og_sse_simdfp_datamove
2298 * @opxcpttype 4UA
2299 * @optest op1=1 op2=2 -> op1=2
2300 * @optest op1=0 op2=-42 -> op1=-42
2301 */
2302FNIEMOP_DEF(iemOp_movups_Wps_Vps)
2303{
2304 IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2305 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2306 if (IEM_IS_MODRM_REG_MODE(bRm))
2307 {
2308 /*
2309 * XMM128, XMM128.
2310 */
2311 IEM_MC_BEGIN(0, 0, 0, 0);
2312 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2313 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2314 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2315 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2316 IEM_GET_MODRM_REG(pVCpu, bRm));
2317 IEM_MC_ADVANCE_RIP_AND_FINISH();
2318 IEM_MC_END();
2319 }
2320 else
2321 {
2322 /*
2323 * [mem128], XMM128.
2324 */
2325 IEM_MC_BEGIN(0, 2, 0, 0);
2326 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2327 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2328
2329 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2330 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2331 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2332 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2333
2334 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2335 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2336
2337 IEM_MC_ADVANCE_RIP_AND_FINISH();
2338 IEM_MC_END();
2339 }
2340}
2341
2342
2343/**
2344 * @opcode 0x11
2345 * @oppfx 0x66
2346 * @opcpuid sse2
2347 * @opgroup og_sse2_pcksclr_datamove
2348 * @opxcpttype 4UA
2349 * @optest op1=1 op2=2 -> op1=2
2350 * @optest op1=0 op2=-42 -> op1=-42
2351 */
2352FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
2353{
2354 IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2355 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2356 if (IEM_IS_MODRM_REG_MODE(bRm))
2357 {
2358 /*
2359 * XMM128, XMM128.
2360 */
2361 IEM_MC_BEGIN(0, 0, 0, 0);
2362 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2363 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2364 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2365 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
2366 IEM_GET_MODRM_REG(pVCpu, bRm));
2367 IEM_MC_ADVANCE_RIP_AND_FINISH();
2368 IEM_MC_END();
2369 }
2370 else
2371 {
2372 /*
2373 * [mem128], XMM128.
2374 */
2375 IEM_MC_BEGIN(0, 2, 0, 0);
2376 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
2377 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2378
2379 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2380 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2381 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2382 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2383
2384 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2385 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2386
2387 IEM_MC_ADVANCE_RIP_AND_FINISH();
2388 IEM_MC_END();
2389 }
2390}
2391
2392
2393/**
2394 * @opcode 0x11
2395 * @oppfx 0xf3
2396 * @opcpuid sse
2397 * @opgroup og_sse_simdfp_datamove
2398 * @opxcpttype 5
2399 * @optest op1=1 op2=2 -> op1=2
2400 * @optest op1=0 op2=-22 -> op1=-22
2401 */
2402FNIEMOP_DEF(iemOp_movss_Wss_Vss)
2403{
2404 IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2405 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2406 if (IEM_IS_MODRM_REG_MODE(bRm))
2407 {
2408 /*
2409 * XMM32, XMM32.
2410 */
2411 IEM_MC_BEGIN(0, 1, 0, 0);
2412 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2413 IEM_MC_LOCAL(uint32_t, uSrc);
2414
2415 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2416 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2417 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
2418 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/, uSrc);
2419
2420 IEM_MC_ADVANCE_RIP_AND_FINISH();
2421 IEM_MC_END();
2422 }
2423 else
2424 {
2425 /*
2426 * [mem32], XMM32.
2427 */
2428 IEM_MC_BEGIN(0, 2, 0, 0);
2429 IEM_MC_LOCAL(uint32_t, uSrc);
2430 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2431
2432 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2433 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2434 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2435 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2436
2437 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
2438 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2439
2440 IEM_MC_ADVANCE_RIP_AND_FINISH();
2441 IEM_MC_END();
2442 }
2443}
2444
2445
2446/**
2447 * @opcode 0x11
2448 * @oppfx 0xf2
2449 * @opcpuid sse2
2450 * @opgroup og_sse2_pcksclr_datamove
2451 * @opxcpttype 5
2452 * @optest op1=1 op2=2 -> op1=2
2453 * @optest op1=0 op2=-42 -> op1=-42
2454 */
2455FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
2456{
2457 IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2458 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2459 if (IEM_IS_MODRM_REG_MODE(bRm))
2460 {
2461 /*
2462 * XMM64, XMM64.
2463 */
2464 IEM_MC_BEGIN(0, 1, 0, 0);
2465 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2466 IEM_MC_LOCAL(uint64_t, uSrc);
2467
2468 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2469 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2470 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2471 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2472
2473 IEM_MC_ADVANCE_RIP_AND_FINISH();
2474 IEM_MC_END();
2475 }
2476 else
2477 {
2478 /*
2479 * [mem64], XMM64.
2480 */
2481 IEM_MC_BEGIN(0, 2, 0, 0);
2482 IEM_MC_LOCAL(uint64_t, uSrc);
2483 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2484
2485 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2486 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2487 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2488 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2489
2490 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2491 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2492
2493 IEM_MC_ADVANCE_RIP_AND_FINISH();
2494 IEM_MC_END();
2495 }
2496}
2497
2498
2499FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
2500{
2501 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2502 if (IEM_IS_MODRM_REG_MODE(bRm))
2503 {
2504 /**
2505 * @opcode 0x12
2506 * @opcodesub 11 mr/reg
2507 * @oppfx none
2508 * @opcpuid sse
2509 * @opgroup og_sse_simdfp_datamove
2510 * @opxcpttype 5
2511 * @optest op1=1 op2=2 -> op1=2
2512 * @optest op1=0 op2=-42 -> op1=-42
2513 */
2514 IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq_WO, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2515
2516 IEM_MC_BEGIN(0, 1, 0, 0);
2517 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2518 IEM_MC_LOCAL(uint64_t, uSrc);
2519
2520 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2521 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2522 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 1 /* a_iQword*/);
2523 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2524
2525 IEM_MC_ADVANCE_RIP_AND_FINISH();
2526 IEM_MC_END();
2527 }
2528 else
2529 {
2530 /**
2531 * @opdone
2532 * @opcode 0x12
2533 * @opcodesub !11 mr/reg
2534 * @oppfx none
2535 * @opcpuid sse
2536 * @opgroup og_sse_simdfp_datamove
2537 * @opxcpttype 5
2538 * @optest op1=1 op2=2 -> op1=2
2539 * @optest op1=0 op2=-42 -> op1=-42
2540 * @opfunction iemOp_movlps_Vq_Mq__movhlps
2541 */
2542 IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2543
2544 IEM_MC_BEGIN(0, 2, 0, 0);
2545 IEM_MC_LOCAL(uint64_t, uSrc);
2546 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2547
2548 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2549 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2550 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2551 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2552
2553 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2554 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2555
2556 IEM_MC_ADVANCE_RIP_AND_FINISH();
2557 IEM_MC_END();
2558 }
2559}
2560
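/* Qword-lane sketch for 0F 12: the register form (movhlps) copies the high
   qword of the source into the low qword of the destination, the memory
   form (movlps) loads the low qword from memory; both leave the high qword
   of the destination untouched. */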
2561
2562/**
2563 * @opcode 0x12
2564 * @opcodesub !11 mr/reg
2565 * @oppfx 0x66
2566 * @opcpuid sse2
2567 * @opgroup og_sse2_pcksclr_datamove
2568 * @opxcpttype 5
2569 * @optest op1=1 op2=2 -> op1=2
2570 * @optest op1=0 op2=-42 -> op1=-42
2571 */
2572FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
2573{
2574 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2575 if (IEM_IS_MODRM_MEM_MODE(bRm))
2576 {
2577 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2578
2579 IEM_MC_BEGIN(0, 2, 0, 0);
2580 IEM_MC_LOCAL(uint64_t, uSrc);
2581 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2582
2583 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2584 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2585 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2586 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2587
2588 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2589 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2590
2591 IEM_MC_ADVANCE_RIP_AND_FINISH();
2592 IEM_MC_END();
2593 }
2594
2595 /**
2596 * @opdone
2597 * @opmnemonic ud660f12m3
2598 * @opcode 0x12
2599 * @opcodesub 11 mr/reg
2600 * @oppfx 0x66
2601 * @opunused immediate
2602 * @opcpuid sse
2603 * @optest ->
2604 */
2605 else
2606 IEMOP_RAISE_INVALID_OPCODE_RET();
2607}
2608
2609
2610/**
2611 * @opcode 0x12
2612 * @oppfx 0xf3
2613 * @opcpuid sse3
2614 * @opgroup og_sse3_pcksclr_datamove
2615 * @opxcpttype 4
2616 * @optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->
2617 * op1=0x00000002000000020000000100000001
2618 */
2619FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
2620{
2621 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2622 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2623 if (IEM_IS_MODRM_REG_MODE(bRm))
2624 {
2625 /*
2626 * XMM, XMM.
2627 */
2628 IEM_MC_BEGIN(0, 1, 0, 0);
2629 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2630 IEM_MC_LOCAL(RTUINT128U, uSrc);
2631
2632 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2633 IEM_MC_PREPARE_SSE_USAGE();
2634
2635 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2636 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
2637 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
2638 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
2639 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
2640
2641 IEM_MC_ADVANCE_RIP_AND_FINISH();
2642 IEM_MC_END();
2643 }
2644 else
2645 {
2646 /*
2647 * XMM, [mem128].
2648 */
2649 IEM_MC_BEGIN(0, 2, 0, 0);
2650 IEM_MC_LOCAL(RTUINT128U, uSrc);
2651 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2652
2653 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2654 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2655 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2656 IEM_MC_PREPARE_SSE_USAGE();
2657
2658 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2659 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
2660 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
2661 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
2662 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
2663
2664 IEM_MC_ADVANCE_RIP_AND_FINISH();
2665 IEM_MC_END();
2666 }
2667}
2668
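/* Lane sketch for movsldup: with source dwords {s3,s2,s1,s0} the four
   stores above produce {s2,s2,s0,s0}, i.e. each even dword is duplicated
   into the odd slot above it. */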
2669
2670/**
2671 * @opcode 0x12
2672 * @oppfx 0xf2
2673 * @opcpuid sse3
2674 * @opgroup og_sse3_pcksclr_datamove
2675 * @opxcpttype 5
2676 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 ->
2677 * op1=0x22222222111111112222222211111111
2678 */
2679FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
2680{
2681 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2682 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2683 if (IEM_IS_MODRM_REG_MODE(bRm))
2684 {
2685 /*
2686 * XMM128, XMM64.
2687 */
2688 IEM_MC_BEGIN(0, 1, 0, 0);
2689 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2690 IEM_MC_LOCAL(uint64_t, uSrc);
2691
2692 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2693 IEM_MC_PREPARE_SSE_USAGE();
2694
2695 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2696 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2697 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2698
2699 IEM_MC_ADVANCE_RIP_AND_FINISH();
2700 IEM_MC_END();
2701 }
2702 else
2703 {
2704 /*
2705 * XMM128, [mem64].
2706 */
2707 IEM_MC_BEGIN(0, 2, 0, 0);
2708 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2709 IEM_MC_LOCAL(uint64_t, uSrc);
2710
2711 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2712 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
2713 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2714 IEM_MC_PREPARE_SSE_USAGE();
2715
2716 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2717 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
2718 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2719
2720 IEM_MC_ADVANCE_RIP_AND_FINISH();
2721 IEM_MC_END();
2722 }
2723}
2724
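/* Lane sketch for movddup: the low source qword is simply broadcast,
   dst.lo = dst.hi = src.lo, which is what the U64 + HI_U64 store pair
   above implements. */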
2725
2726/**
2727 * @opcode 0x13
2728 * @opcodesub !11 mr/reg
2729 * @oppfx none
2730 * @opcpuid sse
2731 * @opgroup og_sse_simdfp_datamove
2732 * @opxcpttype 5
2733 * @optest op1=1 op2=2 -> op1=2
2734 * @optest op1=0 op2=-42 -> op1=-42
2735 */
2736FNIEMOP_DEF(iemOp_movlps_Mq_Vq)
2737{
2738 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2739 if (IEM_IS_MODRM_MEM_MODE(bRm))
2740 {
2741 IEMOP_MNEMONIC2(MR_MEM, MOVLPS, movlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2742
2743 IEM_MC_BEGIN(0, 2, 0, 0);
2744 IEM_MC_LOCAL(uint64_t, uSrc);
2745 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2746
2747 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2748 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2749 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2750 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2751
2752 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2753 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2754
2755 IEM_MC_ADVANCE_RIP_AND_FINISH();
2756 IEM_MC_END();
2757 }
2758
2759 /**
2760 * @opdone
2761 * @opmnemonic ud0f13m3
2762 * @opcode 0x13
2763 * @opcodesub 11 mr/reg
2764 * @oppfx none
2765 * @opunused immediate
2766 * @opcpuid sse
2767 * @optest ->
2768 */
2769 else
2770 IEMOP_RAISE_INVALID_OPCODE_RET();
2771}
2772
2773
2774/**
2775 * @opcode 0x13
2776 * @opcodesub !11 mr/reg
2777 * @oppfx 0x66
2778 * @opcpuid sse2
2779 * @opgroup og_sse2_pcksclr_datamove
2780 * @opxcpttype 5
2781 * @optest op1=1 op2=2 -> op1=2
2782 * @optest op1=0 op2=-42 -> op1=-42
2783 */
2784FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
2785{
2786 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2787 if (IEM_IS_MODRM_MEM_MODE(bRm))
2788 {
2789 IEMOP_MNEMONIC2(MR_MEM, MOVLPD, movlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2790
2791 IEM_MC_BEGIN(0, 2, 0, 0);
2792 IEM_MC_LOCAL(uint64_t, uSrc);
2793 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2794
2795 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2796 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
2797 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2798 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
2799
2800 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
2801 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2802
2803 IEM_MC_ADVANCE_RIP_AND_FINISH();
2804 IEM_MC_END();
2805 }
2806
2807 /**
2808 * @opdone
2809 * @opmnemonic ud660f13m3
2810 * @opcode 0x13
2811 * @opcodesub 11 mr/reg
2812 * @oppfx 0x66
2813 * @opunused immediate
2814 * @opcpuid sse
2815 * @optest ->
2816 */
2817 else
2818 IEMOP_RAISE_INVALID_OPCODE_RET();
2819}
2820
2821
2822/**
2823 * @opmnemonic udf30f13
2824 * @opcode 0x13
2825 * @oppfx 0xf3
2826 * @opunused intel-modrm
2827 * @opcpuid sse
2828 * @optest ->
2829 * @opdone
2830 */
2831
2832/**
2833 * @opmnemonic udf20f13
2834 * @opcode 0x13
2835 * @oppfx 0xf2
2836 * @opunused intel-modrm
2837 * @opcpuid sse
2838 * @optest ->
2839 * @opdone
2840 */
2841
2842/** Opcode 0x0f 0x14 - unpcklps Vx, Wx */
2843FNIEMOP_DEF(iemOp_unpcklps_Vx_Wx)
2844{
2845 IEMOP_MNEMONIC2(RM, UNPCKLPS, unpcklps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2846 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, iemAImpl_unpcklps_u128);
2847}
2848
2849
2850/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
2851FNIEMOP_DEF(iemOp_unpcklpd_Vx_Wx)
2852{
2853 IEMOP_MNEMONIC2(RM, UNPCKLPD, unpcklpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2854 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_unpcklpd_u128);
2855}
2856
2857
2858/**
2859 * @opdone
2860 * @opmnemonic udf30f14
2861 * @opcode 0x14
2862 * @oppfx 0xf3
2863 * @opunused intel-modrm
2864 * @opcpuid sse
2865 * @optest ->
2866 * @opdone
2867 */
2868
2869/**
2870 * @opmnemonic udf20f14
2871 * @opcode 0x14
2872 * @oppfx 0xf2
2873 * @opunused intel-modrm
2874 * @opcpuid sse
2875 * @optest ->
2876 * @opdone
2877 */
2878
2879/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
2880FNIEMOP_DEF(iemOp_unpckhps_Vx_Wx)
2881{
2882 IEMOP_MNEMONIC2(RM, UNPCKHPS, unpckhps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2883 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, iemAImpl_unpckhps_u128);
2884}
2885
2886
2887/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
2888FNIEMOP_DEF(iemOp_unpckhpd_Vx_Wx)
2889{
2890 IEMOP_MNEMONIC2(RM, UNPCKHPD, unpckhpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
2891 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_unpckhpd_u128);
2892}
2893
2894
2895/* Opcode 0xf3 0x0f 0x15 - invalid */
2896/* Opcode 0xf2 0x0f 0x15 - invalid */
2897
2898/**
2899 * @opdone
2900 * @opmnemonic udf30f15
2901 * @opcode 0x15
2902 * @oppfx 0xf3
2903 * @opunused intel-modrm
2904 * @opcpuid sse
2905 * @optest ->
2906 * @opdone
2907 */
2908
2909/**
2910 * @opmnemonic udf20f15
2911 * @opcode 0x15
2912 * @oppfx 0xf2
2913 * @opunused intel-modrm
2914 * @opcpuid sse
2915 * @optest ->
2916 * @opdone
2917 */
2918
2919FNIEMOP_DEF(iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq)
2920{
2921 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2922 if (IEM_IS_MODRM_REG_MODE(bRm))
2923 {
2924 /**
2925 * @opcode 0x16
2926 * @opcodesub 11 mr/reg
2927 * @oppfx none
2928 * @opcpuid sse
2929 * @opgroup og_sse_simdfp_datamove
2930 * @opxcpttype 5
2931 * @optest op1=1 op2=2 -> op1=2
2932 * @optest op1=0 op2=-42 -> op1=-42
2933 */
2934 IEMOP_MNEMONIC2(RM_REG, MOVLHPS, movlhps, VqHi_WO, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2935
2936 IEM_MC_BEGIN(0, 1, 0, 0);
2937 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2938 IEM_MC_LOCAL(uint64_t, uSrc);
2939
2940 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2941 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2942 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
2943 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2944
2945 IEM_MC_ADVANCE_RIP_AND_FINISH();
2946 IEM_MC_END();
2947 }
2948 else
2949 {
2950 /**
2951 * @opdone
2952 * @opcode 0x16
2953 * @opcodesub !11 mr/reg
2954 * @oppfx none
2955 * @opcpuid sse
2956 * @opgroup og_sse_simdfp_datamove
2957 * @opxcpttype 5
2958 * @optest op1=1 op2=2 -> op1=2
2959 * @optest op1=0 op2=-42 -> op1=-42
2960 * @opfunction iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq
2961 */
2962 IEMOP_MNEMONIC2(RM_MEM, MOVHPS, movhps, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2963
2964 IEM_MC_BEGIN(0, 2, 0, 0);
2965 IEM_MC_LOCAL(uint64_t, uSrc);
2966 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2967
2968 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2969 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
2970 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2971 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
2972
2973 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2974 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2975
2976 IEM_MC_ADVANCE_RIP_AND_FINISH();
2977 IEM_MC_END();
2978 }
2979}
2980
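/* Qword-lane sketch for 0F 16, mirroring 0F 12: movlhps copies the low
   qword of the source into the high qword of the destination, movhps loads
   the high qword from memory; the low qword of the destination is
   preserved. */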
2981
2982/**
2983 * @opcode 0x16
2984 * @opcodesub !11 mr/reg
2985 * @oppfx 0x66
2986 * @opcpuid sse2
2987 * @opgroup og_sse2_pcksclr_datamove
2988 * @opxcpttype 5
2989 * @optest op1=1 op2=2 -> op1=2
2990 * @optest op1=0 op2=-42 -> op1=-42
2991 */
2992FNIEMOP_DEF(iemOp_movhpd_Vdq_Mq)
2993{
2994 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2995 if (IEM_IS_MODRM_MEM_MODE(bRm))
2996 {
2997 IEMOP_MNEMONIC2(RM_MEM, MOVHPD, movhpd, VqHi_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
2998
2999 IEM_MC_BEGIN(0, 2, 0, 0);
3000 IEM_MC_LOCAL(uint64_t, uSrc);
3001 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3002
3003 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3004 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3005 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3006 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3007
3008 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3009 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3010
3011 IEM_MC_ADVANCE_RIP_AND_FINISH();
3012 IEM_MC_END();
3013 }
3014
3015 /**
3016 * @opdone
3017 * @opmnemonic ud660f16m3
3018 * @opcode 0x16
3019 * @opcodesub 11 mr/reg
3020 * @oppfx 0x66
3021 * @opunused immediate
3022 * @opcpuid sse
3023 * @optest ->
3024 */
3025 else
3026 IEMOP_RAISE_INVALID_OPCODE_RET();
3027}
3028
3029
3030/**
3031 * @opcode 0x16
3032 * @oppfx 0xf3
3033 * @opcpuid sse3
3034 * @opgroup og_sse3_pcksclr_datamove
3035 * @opxcpttype 4
3036 * @optest op1=-1 op2=0x00000002dddddddd00000001eeeeeeee ->
3037 * op1=0x00000002000000020000000100000001
3038 */
3039FNIEMOP_DEF(iemOp_movshdup_Vdq_Wdq)
3040{
3041 IEMOP_MNEMONIC2(RM, MOVSHDUP, movshdup, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3042 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3043 if (IEM_IS_MODRM_REG_MODE(bRm))
3044 {
3045 /*
3046 * XMM128, XMM128.
3047 */
3048 IEM_MC_BEGIN(0, 1, 0, 0);
3049 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
3050 IEM_MC_LOCAL(RTUINT128U, uSrc);
3051
3052 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3053 IEM_MC_PREPARE_SSE_USAGE();
3054
3055 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3056 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
3057 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
3058 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
3059 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
3060
3061 IEM_MC_ADVANCE_RIP_AND_FINISH();
3062 IEM_MC_END();
3063 }
3064 else
3065 {
3066 /*
3067 * XMM128, [mem128].
3068 */
3069 IEM_MC_BEGIN(0, 2, 0, 0);
3070 IEM_MC_LOCAL(RTUINT128U, uSrc);
3071 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3072
3073 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3074 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
3075 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3076 IEM_MC_PREPARE_SSE_USAGE();
3077
3078 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3079 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
3080 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
3081 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
3082 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
3083
3084 IEM_MC_ADVANCE_RIP_AND_FINISH();
3085 IEM_MC_END();
3086 }
3087}
3088
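/* Lane sketch for movshdup, the odd-lane counterpart of movsldup: source
   dwords {s3,s2,s1,s0} yield {s3,s3,s1,s1}. */
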
3089/**
3090 * @opdone
3091 * @opmnemonic udf20f16
3092 * @opcode 0x16
3093 * @oppfx 0xf2
3094 * @opunused intel-modrm
3095 * @opcpuid sse
3096 * @optest ->
3097 * @opdone
3098 */
3099
3100
3101/**
3102 * @opcode 0x17
3103 * @opcodesub !11 mr/reg
3104 * @oppfx none
3105 * @opcpuid sse
3106 * @opgroup og_sse_simdfp_datamove
3107 * @opxcpttype 5
3108 * @optest op1=1 op2=2 -> op1=2
3109 * @optest op1=0 op2=-42 -> op1=-42
3110 */
3111FNIEMOP_DEF(iemOp_movhps_Mq_Vq)
3112{
3113 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3114 if (IEM_IS_MODRM_MEM_MODE(bRm))
3115 {
3116 IEMOP_MNEMONIC2(MR_MEM, MOVHPS, movhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3117
3118 IEM_MC_BEGIN(0, 2, 0, 0);
3119 IEM_MC_LOCAL(uint64_t, uSrc);
3120 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3121
3122 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3123 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3124 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3125 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3126
3127 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
3128 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3129
3130 IEM_MC_ADVANCE_RIP_AND_FINISH();
3131 IEM_MC_END();
3132 }
3133
3134 /**
3135 * @opdone
3136 * @opmnemonic ud0f17m3
3137 * @opcode 0x17
3138 * @opcodesub 11 mr/reg
3139 * @oppfx none
3140 * @opunused immediate
3141 * @opcpuid sse
3142 * @optest ->
3143 */
3144 else
3145 IEMOP_RAISE_INVALID_OPCODE_RET();
3146}
3147
3148
3149/**
3150 * @opcode 0x17
3151 * @opcodesub !11 mr/reg
3152 * @oppfx 0x66
3153 * @opcpuid sse2
3154 * @opgroup og_sse2_pcksclr_datamove
3155 * @opxcpttype 5
3156 * @optest op1=1 op2=2 -> op1=2
3157 * @optest op1=0 op2=-42 -> op1=-42
3158 */
3159FNIEMOP_DEF(iemOp_movhpd_Mq_Vq)
3160{
3161 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3162 if (IEM_IS_MODRM_MEM_MODE(bRm))
3163 {
3164 IEMOP_MNEMONIC2(MR_MEM, MOVHPD, movhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3165
3166 IEM_MC_BEGIN(0, 2, 0, 0);
3167 IEM_MC_LOCAL(uint64_t, uSrc);
3168 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3169
3170 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3171 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2); /* movhpd is SSE2 (see @opcpuid above) */
3172 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3173 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3174
3175 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/);
3176 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3177
3178 IEM_MC_ADVANCE_RIP_AND_FINISH();
3179 IEM_MC_END();
3180 }
3181
3182 /**
3183 * @opdone
3184 * @opmnemonic ud660f17m3
3185 * @opcode 0x17
3186 * @opcodesub 11 mr/reg
3187 * @oppfx 0x66
3188 * @opunused immediate
3189 * @opcpuid sse
3190 * @optest ->
3191 */
3192 else
3193 IEMOP_RAISE_INVALID_OPCODE_RET();
3194}
3195
3196
3197/**
3198 * @opdone
3199 * @opmnemonic udf30f17
3200 * @opcode 0x17
3201 * @oppfx 0xf3
3202 * @opunused intel-modrm
3203 * @opcpuid sse
3204 * @optest ->
3205 * @opdone
3206 */
3207
3208/**
3209 * @opmnemonic udf20f17
3210 * @opcode 0x17
3211 * @oppfx 0xf2
3212 * @opunused intel-modrm
3213 * @opcpuid sse
3214 * @optest ->
3215 * @opdone
3216 */
3217
3218
3219/** Opcode 0x0f 0x18. */
3220FNIEMOP_DEF(iemOp_prefetch_Grp16)
3221{
3222 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3223 if (IEM_IS_MODRM_MEM_MODE(bRm))
3224 {
3225 switch (IEM_GET_MODRM_REG_8(bRm))
3226 {
3227 case 4: /* Aliased to /0 for the time being according to AMD. */
3228 case 5: /* Aliased to /0 for the time being according to AMD. */
3229 case 6: /* Aliased to /0 for the time being according to AMD. */
3230 case 7: /* Aliased to /0 for the time being according to AMD. */
3231 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break;
3232 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break;
3233 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break;
3234 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break;
3235 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3236 }
3237
3238 IEM_MC_BEGIN(0, 1, 0, 0);
3239 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3240 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3241 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3242 /* Currently a NOP. */
3243 NOREF(GCPtrEffSrc);
3244 IEM_MC_ADVANCE_RIP_AND_FINISH();
3245 IEM_MC_END();
3246 }
3247 else
3248 IEMOP_RAISE_INVALID_OPCODE_RET();
3249}
3250
3251
3252/** Opcode 0x0f 0x19..0x1f. */
3253FNIEMOP_DEF(iemOp_nop_Ev)
3254{
3255 IEMOP_MNEMONIC(nop_Ev, "nop Ev");
3256 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3257 if (IEM_IS_MODRM_REG_MODE(bRm))
3258 {
3259 IEM_MC_BEGIN(0, 0, 0, 0);
3260 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3261 IEM_MC_ADVANCE_RIP_AND_FINISH();
3262 IEM_MC_END();
3263 }
3264 else
3265 {
3266 IEM_MC_BEGIN(0, 1, 0, 0);
3267 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3268 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3269 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3270 /* Currently a NOP. */
3271 NOREF(GCPtrEffSrc);
3272 IEM_MC_ADVANCE_RIP_AND_FINISH();
3273 IEM_MC_END();
3274 }
3275}
3276
3277
3278/** Opcode 0x0f 0x20. */
3279FNIEMOP_DEF(iemOp_mov_Rd_Cd)
3280{
3281 /* mod is ignored, as are operand size overrides. */
3282 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd");
3283 IEMOP_HLP_MIN_386();
3284 if (IEM_IS_64BIT_CODE(pVCpu))
3285 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3286 else
3287 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3288
3289 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3290 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3291 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3292 {
3293 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3294 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3295 IEMOP_RAISE_INVALID_OPCODE_RET(); /* #UD takes precedence over #GP(), see test. */
3296 iCrReg |= 8;
3297 }
3298 switch (iCrReg)
3299 {
3300 case 0: case 2: case 3: case 4: case 8:
3301 break;
3302 default:
3303 IEMOP_RAISE_INVALID_OPCODE_RET();
3304 }
3305 IEMOP_HLP_DONE_DECODING();
3306
3307 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_mov_Rd_Cd, IEM_GET_MODRM_RM(pVCpu, bRm), iCrReg);
3308}
3309
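/* Encoding sketch for the CR8 alias used above: on CPUs with the
   alternative mov-CR8 encoding (fMovCr8In32Bit, an AMD extension), a LOCK
   prefix adds 8 to the reg-encoded CR index, so F0 0F 20 C0 reads CR8 into
   EAX instead of CR0. */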
3310
3311/** Opcode 0x0f 0x21. */
3312FNIEMOP_DEF(iemOp_mov_Rd_Dd)
3313{
3314 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd");
3315 IEMOP_HLP_MIN_386();
3316 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3317 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3318 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3319 IEMOP_RAISE_INVALID_OPCODE_RET();
3320 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_mov_Rd_Dd, IEM_GET_MODRM_RM(pVCpu, bRm), IEM_GET_MODRM_REG_8(bRm));
3321}
3322
3323
3324/** Opcode 0x0f 0x22. */
3325FNIEMOP_DEF(iemOp_mov_Cd_Rd)
3326{
3327 /* mod is ignored, as are operand size overrides. */
3328 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd");
3329 IEMOP_HLP_MIN_386();
3330 if (IEM_IS_64BIT_CODE(pVCpu))
3331 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
3332 else
3333 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT;
3334
3335 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3336 uint8_t iCrReg = IEM_GET_MODRM_REG(pVCpu, bRm);
3337 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3338 {
3339 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
3340 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit)
3341 IEMOP_RAISE_INVALID_OPCODE_RET(); /* #UD takes precedence over #GP(), see test. */
3342 iCrReg |= 8;
3343 }
3344 switch (iCrReg)
3345 {
3346 case 0: case 2: case 3: case 4: case 8:
3347 break;
3348 default:
3349 IEMOP_RAISE_INVALID_OPCODE_RET();
3350 }
3351 IEMOP_HLP_DONE_DECODING();
3352
3353 if (iCrReg & (2 | 8))
3354 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT,
3355 iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
3356 else
3357 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT,
3358 iemCImpl_mov_Cd_Rd, iCrReg, IEM_GET_MODRM_RM(pVCpu, bRm));
3359}
3360
3361
3362/** Opcode 0x0f 0x23. */
3363FNIEMOP_DEF(iemOp_mov_Dd_Rd)
3364{
3365 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd");
3366 IEMOP_HLP_MIN_386();
3367 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3368 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3369 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R)
3370 IEMOP_RAISE_INVALID_OPCODE_RET();
3371 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT,
3372 iemCImpl_mov_Dd_Rd, IEM_GET_MODRM_REG_8(bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
3373}
3374
3375
3376/** Opcode 0x0f 0x24. */
3377FNIEMOP_DEF(iemOp_mov_Rd_Td)
3378{
3379 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td");
3380 IEMOP_HLP_MIN_386();
3381 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3382 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3383 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3384 IEMOP_RAISE_INVALID_OPCODE_RET();
3385 IEM_MC_DEFER_TO_CIMPL_2_RET(0, iemCImpl_mov_Rd_Td, IEM_GET_MODRM_RM(pVCpu, bRm), IEM_GET_MODRM_REG_8(bRm));
3386}
3387
3388
3389/** Opcode 0x0f 0x26. */
3390FNIEMOP_DEF(iemOp_mov_Td_Rd)
3391{
3392 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd");
3393 IEMOP_HLP_MIN_386();
3394 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3395 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3396 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_PENTIUM))
3397 IEMOP_RAISE_INVALID_OPCODE_RET();
3398 IEM_MC_DEFER_TO_CIMPL_2_RET(0, iemCImpl_mov_Td_Rd, IEM_GET_MODRM_REG_8(bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
3399}
3400
3401
3402/**
3403 * @opcode 0x28
3404 * @oppfx none
3405 * @opcpuid sse
3406 * @opgroup og_sse_simdfp_datamove
3407 * @opxcpttype 1
3408 * @optest op1=1 op2=2 -> op1=2
3409 * @optest op1=0 op2=-42 -> op1=-42
3410 */
3411FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
3412{
3413 IEMOP_MNEMONIC2(RM, MOVAPS, movaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3414 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3415 if (IEM_IS_MODRM_REG_MODE(bRm))
3416 {
3417 /*
3418 * Register, register.
3419 */
3420 IEM_MC_BEGIN(0, 0, 0, 0);
3421 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3422 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3423 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3424 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3425 IEM_GET_MODRM_RM(pVCpu, bRm));
3426 IEM_MC_ADVANCE_RIP_AND_FINISH();
3427 IEM_MC_END();
3428 }
3429 else
3430 {
3431 /*
3432 * Register, memory.
3433 */
3434 IEM_MC_BEGIN(0, 2, 0, 0);
3435 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3436 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3437
3438 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3439 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3440 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3441 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3442
3443 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3444 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3445
3446 IEM_MC_ADVANCE_RIP_AND_FINISH();
3447 IEM_MC_END();
3448 }
3449}
3450
3451/**
3452 * @opcode 0x28
3453 * @oppfx 66
3454 * @opcpuid sse2
3455 * @opgroup og_sse2_pcksclr_datamove
3456 * @opxcpttype 1
3457 * @optest op1=1 op2=2 -> op1=2
3458 * @optest op1=0 op2=-42 -> op1=-42
3459 */
3460FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
3461{
3462 IEMOP_MNEMONIC2(RM, MOVAPD, movapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3463 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3464 if (IEM_IS_MODRM_REG_MODE(bRm))
3465 {
3466 /*
3467 * Register, register.
3468 */
3469 IEM_MC_BEGIN(0, 0, 0, 0);
3470 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3471 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3472 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3473 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
3474 IEM_GET_MODRM_RM(pVCpu, bRm));
3475 IEM_MC_ADVANCE_RIP_AND_FINISH();
3476 IEM_MC_END();
3477 }
3478 else
3479 {
3480 /*
3481 * Register, memory.
3482 */
3483 IEM_MC_BEGIN(0, 2, 0, 0);
3484 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3485 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3486
3487 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3488 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3489 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3490 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3491
3492 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3493 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3494
3495 IEM_MC_ADVANCE_RIP_AND_FINISH();
3496 IEM_MC_END();
3497 }
3498}
3499
3500/* Opcode 0xf3 0x0f 0x28 - invalid */
3501/* Opcode 0xf2 0x0f 0x28 - invalid */
3502
3503/**
3504 * @opcode 0x29
3505 * @oppfx none
3506 * @opcpuid sse
3507 * @opgroup og_sse_simdfp_datamove
3508 * @opxcpttype 1
3509 * @optest op1=1 op2=2 -> op1=2
3510 * @optest op1=0 op2=-42 -> op1=-42
3511 */
3512FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
3513{
3514 IEMOP_MNEMONIC2(MR, MOVAPS, movaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3515 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3516 if (IEM_IS_MODRM_REG_MODE(bRm))
3517 {
3518 /*
3519 * Register, register.
3520 */
3521 IEM_MC_BEGIN(0, 0, 0, 0);
3522 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3523 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3524 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3525 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3526 IEM_GET_MODRM_REG(pVCpu, bRm));
3527 IEM_MC_ADVANCE_RIP_AND_FINISH();
3528 IEM_MC_END();
3529 }
3530 else
3531 {
3532 /*
3533 * Memory, register.
3534 */
3535 IEM_MC_BEGIN(0, 2, 0, 0);
3536 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3537 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3538
3539 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3540 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3541 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3542 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3543
3544 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3545 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3546
3547 IEM_MC_ADVANCE_RIP_AND_FINISH();
3548 IEM_MC_END();
3549 }
3550}
3551
3552/**
3553 * @opcode 0x29
3554 * @oppfx 66
3555 * @opcpuid sse2
3556 * @opgroup og_sse2_pcksclr_datamove
3557 * @opxcpttype 1
3558 * @optest op1=1 op2=2 -> op1=2
3559 * @optest op1=0 op2=-42 -> op1=-42
3560 */
3561FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
3562{
3563 IEMOP_MNEMONIC2(MR, MOVAPD, movapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
3564 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3565 if (IEM_IS_MODRM_REG_MODE(bRm))
3566 {
3567 /*
3568 * Register, register.
3569 */
3570 IEM_MC_BEGIN(0, 0, 0, 0);
3571 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3572 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3573 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
3574 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
3575 IEM_GET_MODRM_REG(pVCpu, bRm));
3576 IEM_MC_ADVANCE_RIP_AND_FINISH();
3577 IEM_MC_END();
3578 }
3579 else
3580 {
3581 /*
3582 * Memory, register.
3583 */
3584 IEM_MC_BEGIN(0, 2, 0, 0);
3585 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
3586 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3587
3588 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3589 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3590 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3591 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
3592
3593 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
3594 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
3595
3596 IEM_MC_ADVANCE_RIP_AND_FINISH();
3597 IEM_MC_END();
3598 }
3599}
3600
3601/* Opcode 0xf3 0x0f 0x29 - invalid */
3602/* Opcode 0xf2 0x0f 0x29 - invalid */
3603
3604
3605/** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */
3606FNIEMOP_DEF(iemOp_cvtpi2ps_Vps_Qpi)
3607{
3608 IEMOP_MNEMONIC2(RM, CVTPI2PS, cvtpi2ps, Vps, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3609 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3610 if (IEM_IS_MODRM_REG_MODE(bRm))
3611 {
3612 /*
3613 * XMM, MMX
3614 */
3615 IEM_MC_BEGIN(3, 1, 0, 0);
3616 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3617 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3618 IEM_MC_LOCAL(X86XMMREG, Dst);
3619 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3620 IEM_MC_ARG(uint64_t, u64Src, 2);
3621 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3622 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3623 IEM_MC_PREPARE_FPU_USAGE();
3624 IEM_MC_FPU_TO_MMX_MODE();
3625
3626 IEM_MC_REF_MXCSR(pfMxcsr);
3627 IEM_MC_FETCH_XREG_XMM(Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); /* Need it because the high quadword remains unchanged. */
3628 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
3629
3630 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2ps_u128, pfMxcsr, pDst, u64Src);
3631 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3632 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3633 } IEM_MC_ELSE() {
3634 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3635 } IEM_MC_ENDIF();
3636
3637 IEM_MC_ADVANCE_RIP_AND_FINISH();
3638 IEM_MC_END();
3639 }
3640 else
3641 {
3642 /*
3643 * XMM, [mem64]
3644 */
3645 IEM_MC_BEGIN(3, 2, 0, 0);
3646 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3647 IEM_MC_LOCAL(X86XMMREG, Dst);
3648 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3649 IEM_MC_ARG(uint64_t, u64Src, 2);
3650 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3651
3652 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3653 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3654 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3655 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3656 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3657
3658 IEM_MC_PREPARE_FPU_USAGE();
3659 IEM_MC_FPU_TO_MMX_MODE();
3660 IEM_MC_REF_MXCSR(pfMxcsr);
3661
3662 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2ps_u128, pfMxcsr, pDst, u64Src);
3663 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3664 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3665 } IEM_MC_ELSE() {
3666 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3667 } IEM_MC_ENDIF();
3668
3669 IEM_MC_ADVANCE_RIP_AND_FINISH();
3670 IEM_MC_END();
3671 }
3672}
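
/*
 * Note: the MMX/SSE conversion workers above follow a common pattern: the
 * assembly helper updates MXCSR, and the guest-visible result is only
 * committed when no unmasked SIMD FP exception is pending.  Roughly
 * (illustrative sketch of the shape, not an additional implementation):
 *
 *     IEM_MC_CALL_VOID_AIMPL_3(<worker>, pfMxcsr, pDst, u64Src);
 *     IEM_MC_IF_MXCSR_XCPT_PENDING() {
 *         IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT(); // #XM, or #UD if CR4.OSXMMEXCPT is clear
 *     } IEM_MC_ELSE() {
 *         IEM_MC_STORE_XREG_XMM(...);                // commit only on success
 *     } IEM_MC_ENDIF();
 */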
3673
3674
3675/** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */
3676FNIEMOP_DEF(iemOp_cvtpi2pd_Vpd_Qpi)
3677{
3678 IEMOP_MNEMONIC2(RM, CVTPI2PD, cvtpi2pd, Vpd, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
3679 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3680 if (IEM_IS_MODRM_REG_MODE(bRm))
3681 {
3682 /*
3683 * XMM, MMX
3684 */
3685 IEM_MC_BEGIN(3, 1, 0, 0);
3686 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3687 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3688 IEM_MC_LOCAL(X86XMMREG, Dst);
3689 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3690 IEM_MC_ARG(uint64_t, u64Src, 2);
3691 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3692 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3693 IEM_MC_PREPARE_FPU_USAGE();
3694 IEM_MC_FPU_TO_MMX_MODE();
3695
3696 IEM_MC_REF_MXCSR(pfMxcsr);
3697 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
3698
3699 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2pd_u128, pfMxcsr, pDst, u64Src);
3700 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3701 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3702 } IEM_MC_ELSE() {
3703 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3704 } IEM_MC_ENDIF();
3705
3706 IEM_MC_ADVANCE_RIP_AND_FINISH();
3707 IEM_MC_END();
3708 }
3709 else
3710 {
3711 /*
3712 * XMM, [mem64]
3713 */
3714 IEM_MC_BEGIN(3, 3, 0, 0);
3715 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
3716 IEM_MC_LOCAL(X86XMMREG, Dst);
3717 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
3718 IEM_MC_ARG(uint64_t, u64Src, 2);
3719 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3720
3721 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3722 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3723 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3724 IEM_MC_MAYBE_RAISE_FPU_XCPT();
3725 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3726
3727 /* Doesn't cause a transition to MMX mode. */
3728 IEM_MC_PREPARE_SSE_USAGE();
3729 IEM_MC_REF_MXCSR(pfMxcsr);
3730
3731 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpi2pd_u128, pfMxcsr, pDst, u64Src);
3732 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3733 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3734 } IEM_MC_ELSE() {
3735 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
3736 } IEM_MC_ENDIF();
3737
3738 IEM_MC_ADVANCE_RIP_AND_FINISH();
3739 IEM_MC_END();
3740 }
3741}
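
/*
 * Note the asymmetry above: the register form reads an MMX register and so
 * transitions the x87 unit to MMX mode, while the memory form loads its
 * source from memory and leaves the x87 state alone, which matches the
 * behaviour documented for cvtpi2pd.
 */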
3742
3743
3744/** Opcode 0xf3 0x0f 0x2a - cvtsi2ss Vss, Ey */
3745FNIEMOP_DEF(iemOp_cvtsi2ss_Vss_Ey)
3746{
3747 IEMOP_MNEMONIC2(RM, CVTSI2SS, cvtsi2ss, Vss, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
3748
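 /*
  * Ey means the source is a general register or memory operand whose width
  * follows REX.W: with REX.W it is a 64-bit GPR/mem64, otherwise a 32-bit
  * GPR/mem32, giving the four decode paths below.  E.g. (illustrative):
  *     cvtsi2ss xmm0, eax ; 32-bit signed source
  *     cvtsi2ss xmm0, rax ; 64-bit signed source (REX.W)
  */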
3749 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3750 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3751 {
3752 if (IEM_IS_MODRM_REG_MODE(bRm))
3753 {
3754 /* XMM, greg64 */
3755 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
3756 IEM_MC_LOCAL(uint32_t, fMxcsr);
3757 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3758 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3759 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3760 IEM_MC_ARG(const int64_t *, pi64Src, 2);
3761
3762 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3763 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3764 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3765
3766 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3767 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i64, pfMxcsr, pr32Dst, pi64Src);
3768 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3769 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3770 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3771 } IEM_MC_ELSE() {
3772 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3773 } IEM_MC_ENDIF();
3774
3775 IEM_MC_ADVANCE_RIP_AND_FINISH();
3776 IEM_MC_END();
3777 }
3778 else
3779 {
3780 /* XMM, [mem64] */
3781 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0);
3782 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3783 IEM_MC_LOCAL(uint32_t, fMxcsr);
3784 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3785 IEM_MC_LOCAL(int64_t, i64Src);
3786 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3787 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3788 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 2);
3789
3790 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3791 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3792 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3793 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3794
3795 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3796 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i64, pfMxcsr, pr32Dst, pi64Src);
3797 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3798 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3799 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3800 } IEM_MC_ELSE() {
3801 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3802 } IEM_MC_ENDIF();
3803
3804 IEM_MC_ADVANCE_RIP_AND_FINISH();
3805 IEM_MC_END();
3806 }
3807 }
3808 else
3809 {
3810 if (IEM_IS_MODRM_REG_MODE(bRm))
3811 {
3812 /* XMM, greg32 */
3813 IEM_MC_BEGIN(3, 2, 0, 0);
3814 IEM_MC_LOCAL(uint32_t, fMxcsr);
3815 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3816 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3817 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3818 IEM_MC_ARG(const int32_t *, pi32Src, 2);
3819
3820 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3821 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3822 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3823
3824 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3825 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i32, pfMxcsr, pr32Dst, pi32Src);
3826 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3827 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3828 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3829 } IEM_MC_ELSE() {
3830 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3831 } IEM_MC_ENDIF();
3832
3833 IEM_MC_ADVANCE_RIP_AND_FINISH();
3834 IEM_MC_END();
3835 }
3836 else
3837 {
3838 /* XMM, [mem32] */
3839 IEM_MC_BEGIN(3, 4, 0, 0);
3840 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3841 IEM_MC_LOCAL(uint32_t, fMxcsr);
3842 IEM_MC_LOCAL(RTFLOAT32U, r32Dst);
3843 IEM_MC_LOCAL(int32_t, i32Src);
3844 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3845 IEM_MC_ARG_LOCAL_REF(PRTFLOAT32U, pr32Dst, r32Dst, 1);
3846 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 2);
3847
3848 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3849 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
3850 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3851 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3852
3853 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3854 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2ss_r32_i32, pfMxcsr, pr32Dst, pi32Src);
3855 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3856 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3857 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3858 } IEM_MC_ELSE() {
3859 IEM_MC_STORE_XREG_R32(IEM_GET_MODRM_REG(pVCpu, bRm), r32Dst);
3860 } IEM_MC_ENDIF();
3861
3862 IEM_MC_ADVANCE_RIP_AND_FINISH();
3863 IEM_MC_END();
3864 }
3865 }
3866}
3867
3868
3869/** Opcode 0xf2 0x0f 0x2a - cvtsi2sd Vsd, Ey */
3870FNIEMOP_DEF(iemOp_cvtsi2sd_Vsd_Ey)
3871{
3872 IEMOP_MNEMONIC2(RM, CVTSI2SD, cvtsi2sd, Vsd, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
3873
3874 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3875 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3876 {
3877 if (IEM_IS_MODRM_REG_MODE(bRm))
3878 {
3879 /* XMM, greg64 */
3880 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
3881 IEM_MC_LOCAL(uint32_t, fMxcsr);
3882 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3883 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3884 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3885 IEM_MC_ARG(const int64_t *, pi64Src, 2);
3886
3887 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3888 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3889 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3890
3891 IEM_MC_REF_GREG_I64_CONST(pi64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3892 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i64, pfMxcsr, pr64Dst, pi64Src);
3893 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3894 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3895 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3896 } IEM_MC_ELSE() {
3897 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3898 } IEM_MC_ENDIF();
3899
3900 IEM_MC_ADVANCE_RIP_AND_FINISH();
3901 IEM_MC_END();
3902 }
3903 else
3904 {
3905 /* XMM, [mem64] */
3906 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0);
3907 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3908 IEM_MC_LOCAL(uint32_t, fMxcsr);
3909 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3910 IEM_MC_LOCAL(int64_t, i64Src);
3911 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3912 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3913 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src, i64Src, 2);
3914
3915 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3916 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3917 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3918 IEM_MC_PREPARE_SSE_USAGE(); /** @todo This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3919
3920 IEM_MC_FETCH_MEM_I64(i64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3921 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i64, pfMxcsr, pr64Dst, pi64Src);
3922 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3923 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3924 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3925 } IEM_MC_ELSE() {
3926 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3927 } IEM_MC_ENDIF();
3928
3929 IEM_MC_ADVANCE_RIP_AND_FINISH();
3930 IEM_MC_END();
3931 }
3932 }
3933 else
3934 {
3935 if (IEM_IS_MODRM_REG_MODE(bRm))
3936 {
3937 /* XMM, greg32 */
3938 IEM_MC_BEGIN(3, 2, 0, 0);
3939 IEM_MC_LOCAL(uint32_t, fMxcsr);
3940 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3941 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3942 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3943 IEM_MC_ARG(const int32_t *, pi32Src, 2);
3944
3945 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3946 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3947 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3948
3949 IEM_MC_REF_GREG_I32_CONST(pi32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3950 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i32, pfMxcsr, pr64Dst, pi32Src);
3951 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3952 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3953 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3954 } IEM_MC_ELSE() {
3955 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3956 } IEM_MC_ENDIF();
3957
3958 IEM_MC_ADVANCE_RIP_AND_FINISH();
3959 IEM_MC_END();
3960 }
3961 else
3962 {
3963 /* XMM, [mem32] */
3964 IEM_MC_BEGIN(3, 4, 0, 0);
3965 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3966 IEM_MC_LOCAL(uint32_t, fMxcsr);
3967 IEM_MC_LOCAL(RTFLOAT64U, r64Dst);
3968 IEM_MC_LOCAL(int32_t, i32Src);
3969 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
3970 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Dst, r64Dst, 1);
3971 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src, i32Src, 2);
3972
3973 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3974 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3975 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3976 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
3977
3978 IEM_MC_FETCH_MEM_I32(i32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3979 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsi2sd_r64_i32, pfMxcsr, pr64Dst, pi32Src);
3980 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
3981 IEM_MC_IF_MXCSR_XCPT_PENDING() {
3982 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
3983 } IEM_MC_ELSE() {
3984 IEM_MC_STORE_XREG_R64(IEM_GET_MODRM_REG(pVCpu, bRm), r64Dst);
3985 } IEM_MC_ENDIF();
3986
3987 IEM_MC_ADVANCE_RIP_AND_FINISH();
3988 IEM_MC_END();
3989 }
3990 }
3991}
3992
3993
3994/**
3995 * @opcode 0x2b
3996 * @opcodesub !11 mr/reg
3997 * @oppfx none
3998 * @opcpuid sse
3999 * @opgroup og_sse1_cachect
4000 * @opxcpttype 1
4001 * @optest op1=1 op2=2 -> op1=2
4002 * @optest op1=0 op2=-42 -> op1=-42
4003 */
4004FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
4005{
4006 IEMOP_MNEMONIC2(MR_MEM, MOVNTPS, movntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4007 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4008 if (IEM_IS_MODRM_MEM_MODE(bRm))
4009 {
4010 /*
4011 * memory, register.
4012 */
4013 IEM_MC_BEGIN(0, 2, 0, 0);
4014 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
4015 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4016
4017 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4018 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4019 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4020 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4021
4022 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
4023 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4024
4025 IEM_MC_ADVANCE_RIP_AND_FINISH();
4026 IEM_MC_END();
4027 }
4028 /* The register, register encoding is invalid. */
4029 else
4030 IEMOP_RAISE_INVALID_OPCODE_RET();
4031}
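
/*
 * Note: movntps (and movntpd below) merely hint that the store should
 * bypass the caches; since the hint has no architecturally visible effect
 * on the result, IEM simply performs an ordinary aligned 128-bit store.
 */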
4032
4033/**
4034 * @opcode 0x2b
4035 * @opcodesub !11 mr/reg
4036 * @oppfx 0x66
4037 * @opcpuid sse2
4038 * @opgroup og_sse2_cachect
4039 * @opxcpttype 1
4040 * @optest op1=1 op2=2 -> op1=2
4041 * @optest op1=0 op2=-42 -> op1=-42
4042 */
4043FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
4044{
4045 IEMOP_MNEMONIC2(MR_MEM, MOVNTPD, movntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4046 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4047 if (IEM_IS_MODRM_MEM_MODE(bRm))
4048 {
4049 /*
4050 * memory, register.
4051 */
4052 IEM_MC_BEGIN(0, 2, 0, 0);
4053 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */
4054 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4055
4056 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4057 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4058 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4059 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4060
4061 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
4062 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4063
4064 IEM_MC_ADVANCE_RIP_AND_FINISH();
4065 IEM_MC_END();
4066 }
4067 /* The register, register encoding is invalid. */
4068 else
4069 IEMOP_RAISE_INVALID_OPCODE_RET();
4070}
4071/* Opcode 0xf3 0x0f 0x2b - invalid */
4072/* Opcode 0xf2 0x0f 0x2b - invalid */
4073
4074
4075/** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */
4076FNIEMOP_DEF(iemOp_cvttps2pi_Ppi_Wps)
4077{
4078 IEMOP_MNEMONIC2(RM, CVTTPS2PI, cvttps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4079 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4080 if (IEM_IS_MODRM_REG_MODE(bRm))
4081 {
4082 /*
4083 * Register, register.
4084 */
4085 IEM_MC_BEGIN(3, 1, 0, 0);
4086 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4087 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4088 IEM_MC_LOCAL(uint64_t, u64Dst);
4089 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4090 IEM_MC_ARG(uint64_t, u64Src, 2);
4091 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4092 IEM_MC_PREPARE_FPU_USAGE();
4093 IEM_MC_FPU_TO_MMX_MODE();
4094
4095 IEM_MC_REF_MXCSR(pfMxcsr);
4096 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword */);
4097
4098 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4099 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4100 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4101 } IEM_MC_ELSE() {
4102 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4103 } IEM_MC_ENDIF();
4104
4105 IEM_MC_ADVANCE_RIP_AND_FINISH();
4106 IEM_MC_END();
4107 }
4108 else
4109 {
4110 /*
4111 * Register, memory.
4112 */
4113 IEM_MC_BEGIN(3, 2, 0, 0);
4114 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4115 IEM_MC_LOCAL(uint64_t, u64Dst);
4116 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4117 IEM_MC_ARG(uint64_t, u64Src, 2);
4118 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4119
4120 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4121 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4122 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4123 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4124
4125 IEM_MC_PREPARE_FPU_USAGE();
4126 IEM_MC_FPU_TO_MMX_MODE();
4127 IEM_MC_REF_MXCSR(pfMxcsr);
4128
4129 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4130 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4131 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4132 } IEM_MC_ELSE() {
4133 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4134 } IEM_MC_ENDIF();
4135
4136 IEM_MC_ADVANCE_RIP_AND_FINISH();
4137 IEM_MC_END();
4138 }
4139}
4140
4141
4142/** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
4143FNIEMOP_DEF(iemOp_cvttpd2pi_Ppi_Wpd)
4144{
4145 IEMOP_MNEMONIC2(RM, CVTTPD2PI, cvttpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4146 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4147 if (IEM_IS_MODRM_REG_MODE(bRm))
4148 {
4149 /*
4150 * Register, register.
4151 */
4152 IEM_MC_BEGIN(3, 1, 0, 0);
4153 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4154 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4155 IEM_MC_LOCAL(uint64_t, u64Dst);
4156 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4157 IEM_MC_ARG(PCX86XMMREG, pSrc, 2);
4158 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4159 IEM_MC_PREPARE_FPU_USAGE();
4160 IEM_MC_FPU_TO_MMX_MODE();
4161
4162 IEM_MC_REF_MXCSR(pfMxcsr);
4163 IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4164
4165 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4166 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4167 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4168 } IEM_MC_ELSE() {
4169 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4170 } IEM_MC_ENDIF();
4171
4172 IEM_MC_ADVANCE_RIP_AND_FINISH();
4173 IEM_MC_END();
4174 }
4175 else
4176 {
4177 /*
4178 * Register, memory.
4179 */
4180 IEM_MC_BEGIN(3, 3, 0, 0);
4181 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4182 IEM_MC_LOCAL(uint64_t, u64Dst);
4183 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4184 IEM_MC_LOCAL(X86XMMREG, uSrc);
4185 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 2);
4186 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4187
4188 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4189 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4190 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4191 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4192
4193 IEM_MC_PREPARE_FPU_USAGE();
4194 IEM_MC_FPU_TO_MMX_MODE();
4195
4196 IEM_MC_REF_MXCSR(pfMxcsr);
4197
4198 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4199 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4200 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4201 } IEM_MC_ELSE() {
4202 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4203 } IEM_MC_ENDIF();
4204
4205 IEM_MC_ADVANCE_RIP_AND_FINISH();
4206 IEM_MC_END();
4207 }
4208}
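
/*
 * Note: the cvttpd2pi worker takes a pointer to the full 128-bit source
 * (two packed doubles), whereas cvttps2pi above only needs the low
 * quadword (two packed singles) and thus passes a uint64_t by value.
 */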
4209
4210
4211/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
4212FNIEMOP_DEF(iemOp_cvttss2si_Gy_Wss)
4213{
4214 IEMOP_MNEMONIC2(RM, CVTTSS2SI, cvttss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4215
4216 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4217 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4218 {
4219 if (IEM_IS_MODRM_REG_MODE(bRm))
4220 {
4221 /* greg64, XMM */
4222 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
4223 IEM_MC_LOCAL(uint32_t, fMxcsr);
4224 IEM_MC_LOCAL(int64_t, i64Dst);
4225 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4226 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4227 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4228
4229 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4230 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4231 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4232
4233 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4234 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4235 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4236 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4237 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4238 } IEM_MC_ELSE() {
4239 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4240 } IEM_MC_ENDIF();
4241
4242 IEM_MC_ADVANCE_RIP_AND_FINISH();
4243 IEM_MC_END();
4244 }
4245 else
4246 {
4247 /* greg64, [mem64] */
4248 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0);
4249 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4250 IEM_MC_LOCAL(uint32_t, fMxcsr);
4251 IEM_MC_LOCAL(int64_t, i64Dst);
4252 IEM_MC_LOCAL(uint32_t, u32Src);
4253 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4254 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4255 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4256
4257 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4258 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4259 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4260 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4261
4262 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4263 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4264 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4265 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4266 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4267 } IEM_MC_ELSE() {
4268 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4269 } IEM_MC_ENDIF();
4270
4271 IEM_MC_ADVANCE_RIP_AND_FINISH();
4272 IEM_MC_END();
4273 }
4274 }
4275 else
4276 {
4277 if (IEM_IS_MODRM_REG_MODE(bRm))
4278 {
4279 /* greg32, XMM */
4280 IEM_MC_BEGIN(3, 2, 0, 0);
4281 IEM_MC_LOCAL(uint32_t, fMxcsr);
4282 IEM_MC_LOCAL(int32_t, i32Dst);
4283 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4284 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4285 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4286
4287 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4288 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4289 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4290
4291 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4292 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4293 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4294 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4295 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4296 } IEM_MC_ELSE() {
4297 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4298 } IEM_MC_ENDIF();
4299
4300 IEM_MC_ADVANCE_RIP_AND_FINISH();
4301 IEM_MC_END();
4302 }
4303 else
4304 {
4305 /* greg32, [mem32] */
4306 IEM_MC_BEGIN(3, 4, 0, 0);
4307 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4308 IEM_MC_LOCAL(uint32_t, fMxcsr);
4309 IEM_MC_LOCAL(int32_t, i32Dst);
4310 IEM_MC_LOCAL(uint32_t, u32Src);
4311 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4312 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4313 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4314
4315 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4316 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4317 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4318 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4319
4320 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4321 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4322 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4323 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4324 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4325 } IEM_MC_ELSE() {
4326 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4327 } IEM_MC_ENDIF();
4328
4329 IEM_MC_ADVANCE_RIP_AND_FINISH();
4330 IEM_MC_END();
4331 }
4332 }
4333}
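
/*
 * Note: the scalar converters above work on a local MXCSR copy (fMxcsr)
 * which IEM_MC_SSE_UPDATE_MXCSR() merges back into the guest MXCSR before
 * the exception-pending check, unlike the packed variants that hand the
 * worker a direct reference via IEM_MC_REF_MXCSR().
 */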
4334
4335
4336/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
4337FNIEMOP_DEF(iemOp_cvttsd2si_Gy_Wsd)
4338{
4339 IEMOP_MNEMONIC2(RM, CVTTSD2SI, cvttsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4340
4341 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4342 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4343 {
4344 if (IEM_IS_MODRM_REG_MODE(bRm))
4345 {
4346 /* greg64, XMM */
4347 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
4348 IEM_MC_LOCAL(uint32_t, fMxcsr);
4349 IEM_MC_LOCAL(int64_t, i64Dst);
4350 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4351 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4352 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4353
4354 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4355 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4356 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4357
4358 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4359 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4360 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4361 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4362 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4363 } IEM_MC_ELSE() {
4364 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4365 } IEM_MC_ENDIF();
4366
4367 IEM_MC_ADVANCE_RIP_AND_FINISH();
4368 IEM_MC_END();
4369 }
4370 else
4371 {
4372 /* greg64, [mem64] */
4373 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0);
4374 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4375 IEM_MC_LOCAL(uint32_t, fMxcsr);
4376 IEM_MC_LOCAL(int64_t, i64Dst);
4377 IEM_MC_LOCAL(uint64_t, u64Src);
4378 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4379 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4380 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4381
4382 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4383 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4384 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4385 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4386
4387 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4388 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4389 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4390 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4391 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4392 } IEM_MC_ELSE() {
4393 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4394 } IEM_MC_ENDIF();
4395
4396 IEM_MC_ADVANCE_RIP_AND_FINISH();
4397 IEM_MC_END();
4398 }
4399 }
4400 else
4401 {
4402 if (IEM_IS_MODRM_REG_MODE(bRm))
4403 {
4404 /* greg, XMM */
4405 IEM_MC_BEGIN(3, 2, 0, 0);
4406 IEM_MC_LOCAL(uint32_t, fMxcsr);
4407 IEM_MC_LOCAL(int32_t, i32Dst);
4408 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4409 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4410 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4411
4412 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4413 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4414 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4415
4416 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4417 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4418 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4419 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4420 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4421 } IEM_MC_ELSE() {
4422 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4423 } IEM_MC_ENDIF();
4424
4425 IEM_MC_ADVANCE_RIP_AND_FINISH();
4426 IEM_MC_END();
4427 }
4428 else
4429 {
4430 /* greg32, [mem64] */
4431 IEM_MC_BEGIN(3, 4, 0, 0);
4432 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4433 IEM_MC_LOCAL(uint32_t, fMxcsr);
4434 IEM_MC_LOCAL(int32_t, i32Dst);
4435 IEM_MC_LOCAL(uint64_t, u64Src);
4436 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4437 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4438 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4439
4440 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4441 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4442 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4443 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4444
4445 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4446 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvttsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4447 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4448 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4449 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4450 } IEM_MC_ELSE() {
4451 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4452 } IEM_MC_ENDIF();
4453
4454 IEM_MC_ADVANCE_RIP_AND_FINISH();
4455 IEM_MC_END();
4456 }
4457 }
4458}
4459
4460
4461/** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
4462FNIEMOP_DEF(iemOp_cvtps2pi_Ppi_Wps)
4463{
4464 IEMOP_MNEMONIC2(RM, CVTPS2PI, cvtps2pi, Pq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4465 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4466 if (IEM_IS_MODRM_REG_MODE(bRm))
4467 {
4468 /*
4469 * Register, register.
4470 */
4471 IEM_MC_BEGIN(3, 1, 0, 0);
4472 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4473 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4474 IEM_MC_LOCAL(uint64_t, u64Dst);
4475 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4476 IEM_MC_ARG(uint64_t, u64Src, 2);
4477
4478 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4479 IEM_MC_PREPARE_FPU_USAGE();
4480 IEM_MC_FPU_TO_MMX_MODE();
4481
4482 IEM_MC_REF_MXCSR(pfMxcsr);
4483 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword */);
4484
4485 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4486 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4487 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4488 } IEM_MC_ELSE() {
4489 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4490 } IEM_MC_ENDIF();
4491
4492 IEM_MC_ADVANCE_RIP_AND_FINISH();
4493 IEM_MC_END();
4494 }
4495 else
4496 {
4497 /*
4498 * Register, memory.
4499 */
4500 IEM_MC_BEGIN(3, 2, 0, 0);
4501 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4502 IEM_MC_LOCAL(uint64_t, u64Dst);
4503 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4504 IEM_MC_ARG(uint64_t, u64Src, 2);
4505 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4506
4507 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4508 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4509 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4510 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4511
4512 IEM_MC_PREPARE_FPU_USAGE();
4513 IEM_MC_FPU_TO_MMX_MODE();
4514 IEM_MC_REF_MXCSR(pfMxcsr);
4515
4516 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtps2pi_u128, pfMxcsr, pu64Dst, u64Src);
4517 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4518 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4519 } IEM_MC_ELSE() {
4520 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4521 } IEM_MC_ENDIF();
4522
4523 IEM_MC_ADVANCE_RIP_AND_FINISH();
4524 IEM_MC_END();
4525 }
4526}
4527
4528
4529/** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
4530FNIEMOP_DEF(iemOp_cvtpd2pi_Qpi_Wpd)
4531{
4532 IEMOP_MNEMONIC2(RM, CVTPD2PI, cvtpd2pi, Pq, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
4533 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4534 if (IEM_IS_MODRM_REG_MODE(bRm))
4535 {
4536 /*
4537 * Register, register.
4538 */
4539 IEM_MC_BEGIN(3, 1, 0, 0);
4540 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4541 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4542 IEM_MC_LOCAL(uint64_t, u64Dst);
4543 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4544 IEM_MC_ARG(PCX86XMMREG, pSrc, 2);
4545
4546 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4547 IEM_MC_PREPARE_FPU_USAGE();
4548 IEM_MC_FPU_TO_MMX_MODE();
4549
4550 IEM_MC_REF_MXCSR(pfMxcsr);
4551 IEM_MC_REF_XREG_XMM_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4552
4553 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4554 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4555 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4556 } IEM_MC_ELSE() {
4557 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4558 } IEM_MC_ENDIF();
4559
4560 IEM_MC_ADVANCE_RIP_AND_FINISH();
4561 IEM_MC_END();
4562 }
4563 else
4564 {
4565 /*
4566 * Register, memory.
4567 */
4568 IEM_MC_BEGIN(3, 3, 0, 0);
4569 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4570 IEM_MC_LOCAL(uint64_t, u64Dst);
4571 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Dst, 1);
4572 IEM_MC_LOCAL(X86XMMREG, uSrc);
4573 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, pSrc, uSrc, 2);
4574 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4575
4576 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4577 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4578 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4579 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4580
4581 IEM_MC_PREPARE_FPU_USAGE();
4582 IEM_MC_FPU_TO_MMX_MODE();
4583
4584 IEM_MC_REF_MXCSR(pfMxcsr);
4585
4586 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtpd2pi_u128, pfMxcsr, pu64Dst, pSrc);
4587 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4588 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4589 } IEM_MC_ELSE() {
4590 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Dst);
4591 } IEM_MC_ENDIF();
4592
4593 IEM_MC_ADVANCE_RIP_AND_FINISH();
4594 IEM_MC_END();
4595 }
4596}
4597
4598
4599/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
4600FNIEMOP_DEF(iemOp_cvtss2si_Gy_Wss)
4601{
4602 IEMOP_MNEMONIC2(RM, CVTSS2SI, cvtss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4603
4604 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4605 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4606 {
4607 if (IEM_IS_MODRM_REG_MODE(bRm))
4608 {
4609 /* greg64, XMM */
4610 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
4611 IEM_MC_LOCAL(uint32_t, fMxcsr);
4612 IEM_MC_LOCAL(int64_t, i64Dst);
4613 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4614 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4615 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4616
4617 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4618 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4619 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4620
4621 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4622 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4623 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4624 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4625 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4626 } IEM_MC_ELSE() {
4627 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4628 } IEM_MC_ENDIF();
4629
4630 IEM_MC_ADVANCE_RIP_AND_FINISH();
4631 IEM_MC_END();
4632 }
4633 else
4634 {
4635 /* greg64, [mem64] */
4636 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0);
4637 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4638 IEM_MC_LOCAL(uint32_t, fMxcsr);
4639 IEM_MC_LOCAL(int64_t, i64Dst);
4640 IEM_MC_LOCAL(uint32_t, u32Src);
4641 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4642 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4643 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4644
4645 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4646 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4647 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4648 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4649
4650 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4651 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i64_r32, pfMxcsr, pi64Dst, pu32Src);
4652 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4653 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4654 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4655 } IEM_MC_ELSE() {
4656 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4657 } IEM_MC_ENDIF();
4658
4659 IEM_MC_ADVANCE_RIP_AND_FINISH();
4660 IEM_MC_END();
4661 }
4662 }
4663 else
4664 {
4665 if (IEM_IS_MODRM_REG_MODE(bRm))
4666 {
4667 /* greg32, XMM */
4668 IEM_MC_BEGIN(3, 2, 0, 0);
4669 IEM_MC_LOCAL(uint32_t, fMxcsr);
4670 IEM_MC_LOCAL(int32_t, i32Dst);
4671 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4672 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4673 IEM_MC_ARG(const uint32_t *, pu32Src, 2);
4674
4675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4676 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4677 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4678
4679 IEM_MC_REF_XREG_U32_CONST(pu32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4680 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4681 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4682 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4683 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4684 } IEM_MC_ELSE() {
4685 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4686 } IEM_MC_ENDIF();
4687
4688 IEM_MC_ADVANCE_RIP_AND_FINISH();
4689 IEM_MC_END();
4690 }
4691 else
4692 {
4693 /* greg32, [mem32] */
4694 IEM_MC_BEGIN(3, 4, 0, 0);
4695 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4696 IEM_MC_LOCAL(uint32_t, fMxcsr);
4697 IEM_MC_LOCAL(int32_t, i32Dst);
4698 IEM_MC_LOCAL(uint32_t, u32Src);
4699 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4700 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4701 IEM_MC_ARG_LOCAL_REF(const uint32_t *, pu32Src, u32Src, 2);
4702
4703 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4704 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4705 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4706 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4707
4708 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4709 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtss2si_i32_r32, pfMxcsr, pi32Dst, pu32Src);
4710 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4711 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4712 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4713 } IEM_MC_ELSE() {
4714 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4715 } IEM_MC_ENDIF();
4716
4717 IEM_MC_ADVANCE_RIP_AND_FINISH();
4718 IEM_MC_END();
4719 }
4720 }
4721}
4722
4723
4724/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
4725FNIEMOP_DEF(iemOp_cvtsd2si_Gy_Wsd)
4726{
4727 IEMOP_MNEMONIC2(RM, CVTSD2SI, cvtsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
4728
4729 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4730 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4731 {
4732 if (IEM_IS_MODRM_REG_MODE(bRm))
4733 {
4734 /* greg64, XMM */
4735 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
4736 IEM_MC_LOCAL(uint32_t, fMxcsr);
4737 IEM_MC_LOCAL(int64_t, i64Dst);
4738 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4739 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4740 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4741
4742 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4743 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4744 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4745
4746 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4747 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4748 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4749 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4750 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4751 } IEM_MC_ELSE() {
4752 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4753 } IEM_MC_ENDIF();
4754
4755 IEM_MC_ADVANCE_RIP_AND_FINISH();
4756 IEM_MC_END();
4757 }
4758 else
4759 {
4760 /* greg64, [mem64] */
4761 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0);
4762 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4763 IEM_MC_LOCAL(uint32_t, fMxcsr);
4764 IEM_MC_LOCAL(int64_t, i64Dst);
4765 IEM_MC_LOCAL(uint64_t, u64Src);
4766 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4767 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 1);
4768 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4769
4770 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4771 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4772 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4773 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4774
4775 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4776 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i64_r64, pfMxcsr, pi64Dst, pu64Src);
4777 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4778 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4779 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4780 } IEM_MC_ELSE() {
4781 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst);
4782 } IEM_MC_ENDIF();
4783
4784 IEM_MC_ADVANCE_RIP_AND_FINISH();
4785 IEM_MC_END();
4786 }
4787 }
4788 else
4789 {
4790 if (IEM_IS_MODRM_REG_MODE(bRm))
4791 {
4792 /* greg32, XMM */
4793 IEM_MC_BEGIN(3, 2, 0, 0);
4794 IEM_MC_LOCAL(uint32_t, fMxcsr);
4795 IEM_MC_LOCAL(int32_t, i32Dst);
4796 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4797 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4798 IEM_MC_ARG(const uint64_t *, pu64Src, 2);
4799
4800 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4801 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4802 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4803
4804 IEM_MC_REF_XREG_U64_CONST(pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4805 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4806 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4807 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4808 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4809 } IEM_MC_ELSE() {
4810 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4811 } IEM_MC_ENDIF();
4812
4813 IEM_MC_ADVANCE_RIP_AND_FINISH();
4814 IEM_MC_END();
4815 }
4816 else
4817 {
4818 /* greg32, [mem64] */
4819 IEM_MC_BEGIN(3, 4, 0, 0);
4820 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4821 IEM_MC_LOCAL(uint32_t, fMxcsr);
4822 IEM_MC_LOCAL(int32_t, i32Dst);
4823 IEM_MC_LOCAL(uint64_t, u64Src);
4824 IEM_MC_ARG_LOCAL_REF(uint32_t *, pfMxcsr, fMxcsr, 0);
4825 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 1);
4826 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 2);
4827
4828 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4829 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4830 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4831 IEM_MC_PREPARE_SSE_USAGE(); /** @todo: This is superfluous because IEM_MC_CALL_SSE_AIMPL_3() is calling this but the tstIEMCheckMc testcase depends on it. */
4832
4833 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4834 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_cvtsd2si_i32_r64, pfMxcsr, pi32Dst, pu64Src);
4835 IEM_MC_SSE_UPDATE_MXCSR(fMxcsr);
4836 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4837 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4838 } IEM_MC_ELSE() {
4839 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst);
4840 } IEM_MC_ENDIF();
4841
4842 IEM_MC_ADVANCE_RIP_AND_FINISH();
4843 IEM_MC_END();
4844 }
4845 }
4846}
4847
4848
4849/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
4850FNIEMOP_DEF(iemOp_ucomiss_Vss_Wss)
4851{
4852 IEMOP_MNEMONIC2(RM, UCOMISS, ucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4853 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4854 if (IEM_IS_MODRM_REG_MODE(bRm))
4855 {
4856 /*
4857 * Register, register.
4858 */
4859 IEM_MC_BEGIN(4, 1, 0, 0);
4860 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4861 IEM_MC_LOCAL(uint32_t, fEFlags);
4862 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4863 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4864 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4865 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
4866 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4867 IEM_MC_PREPARE_SSE_USAGE();
4868 IEM_MC_FETCH_EFLAGS(fEFlags);
4869 IEM_MC_REF_MXCSR(pfMxcsr);
4870 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4871 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
4872 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4873 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4874 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4875 } IEM_MC_ELSE() {
4876 IEM_MC_COMMIT_EFLAGS(fEFlags);
4877 } IEM_MC_ENDIF();
4878
4879 IEM_MC_ADVANCE_RIP_AND_FINISH();
4880 IEM_MC_END();
4881 }
4882 else
4883 {
4884 /*
4885 * Register, memory.
4886 */
4887 IEM_MC_BEGIN(4, 3, 0, 0);
4888 IEM_MC_LOCAL(uint32_t, fEFlags);
4889 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4890 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4891 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4892 IEM_MC_LOCAL(X86XMMREG, uSrc2);
4893 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
4894 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4895
4896 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4897 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
4898 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4899 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4900
4901 IEM_MC_PREPARE_SSE_USAGE();
4902 IEM_MC_FETCH_EFLAGS(fEFlags);
4903 IEM_MC_REF_MXCSR(pfMxcsr);
4904 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4905 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4906 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4907 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4908 } IEM_MC_ELSE() {
4909 IEM_MC_COMMIT_EFLAGS(fEFlags);
4910 } IEM_MC_ENDIF();
4911
4912 IEM_MC_ADVANCE_RIP_AND_FINISH();
4913 IEM_MC_END();
4914 }
4915}
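
/*
 * Note: ucomiss/ucomisd differ from comiss/comisd (0x0f 0x2f below) only in
 * NaN handling: the unordered variants raise #I just for SNaN operands,
 * while the ordered compares raise it for QNaNs as well.  All four set
 * ZF/PF/CF from the compare result and clear OF/SF/AF.
 */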
4916
4917
4918/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
4919FNIEMOP_DEF(iemOp_ucomisd_Vsd_Wsd)
4920{
4921 IEMOP_MNEMONIC2(RM, UCOMISD, ucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4922 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4923 if (IEM_IS_MODRM_REG_MODE(bRm))
4924 {
4925 /*
4926 * Register, register.
4927 */
4928 IEM_MC_BEGIN(4, 1, 0, 0);
4929 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4930 IEM_MC_LOCAL(uint32_t, fEFlags);
4931 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4932 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4933 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4934 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
4935 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4936 IEM_MC_PREPARE_SSE_USAGE();
4937 IEM_MC_FETCH_EFLAGS(fEFlags);
4938 IEM_MC_REF_MXCSR(pfMxcsr);
4939 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4940 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
4941 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4942 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4943 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4944 } IEM_MC_ELSE() {
4945 IEM_MC_COMMIT_EFLAGS(fEFlags);
4946 } IEM_MC_ENDIF();
4947
4948 IEM_MC_ADVANCE_RIP_AND_FINISH();
4949 IEM_MC_END();
4950 }
4951 else
4952 {
4953 /*
4954 * Register, memory.
4955 */
4956 IEM_MC_BEGIN(4, 3, 0, 0);
4957 IEM_MC_LOCAL(uint32_t, fEFlags);
4958 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
4959 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
4960 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
4961 IEM_MC_LOCAL(X86XMMREG, uSrc2);
4962 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
4963 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4964
4965 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4966 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4967 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4968 IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4969
4970 IEM_MC_PREPARE_SSE_USAGE();
4971 IEM_MC_FETCH_EFLAGS(fEFlags);
4972 IEM_MC_REF_MXCSR(pfMxcsr);
4973 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
4974 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_ucomisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
4975 IEM_MC_IF_MXCSR_XCPT_PENDING() {
4976 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
4977 } IEM_MC_ELSE() {
4978 IEM_MC_COMMIT_EFLAGS(fEFlags);
4979 } IEM_MC_ENDIF();
4980
4981 IEM_MC_ADVANCE_RIP_AND_FINISH();
4982 IEM_MC_END();
4983 }
4984}
4985
4986
4987/* Opcode 0xf3 0x0f 0x2e - invalid */
4988/* Opcode 0xf2 0x0f 0x2e - invalid */
4989
4990
4991/** Opcode 0x0f 0x2f - comiss Vss, Wss */
4992FNIEMOP_DEF(iemOp_comiss_Vss_Wss)
4993{
4994 IEMOP_MNEMONIC2(RM, COMISS, comiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
4995 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4996 if (IEM_IS_MODRM_REG_MODE(bRm))
4997 {
4998 /*
4999 * Register, register.
5000 */
5001 IEM_MC_BEGIN(4, 1, 0, 0);
5002 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
5003 IEM_MC_LOCAL(uint32_t, fEFlags);
5004 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5005 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5006 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5007 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
5008 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5009 IEM_MC_PREPARE_SSE_USAGE();
5010 IEM_MC_FETCH_EFLAGS(fEFlags);
5011 IEM_MC_REF_MXCSR(pfMxcsr);
5012 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5013 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
5014 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5015 IEM_MC_IF_MXCSR_XCPT_PENDING() {
5016 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5017 } IEM_MC_ELSE() {
5018 IEM_MC_COMMIT_EFLAGS(fEFlags);
5019 } IEM_MC_ENDIF();
5020
5021 IEM_MC_ADVANCE_RIP_AND_FINISH();
5022 IEM_MC_END();
5023 }
5024 else
5025 {
5026 /*
5027 * Register, memory.
5028 */
5029 IEM_MC_BEGIN(4, 3, 0, 0);
5030 IEM_MC_LOCAL(uint32_t, fEFlags);
5031 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5032 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5033 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5034 IEM_MC_LOCAL(X86XMMREG, uSrc2);
5035 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
5036 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5037
5038 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5039 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
5040 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5041 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5042
5043 IEM_MC_PREPARE_SSE_USAGE();
5044 IEM_MC_FETCH_EFLAGS(fEFlags);
5045 IEM_MC_REF_MXCSR(pfMxcsr);
5046 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5047 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comiss_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5048 IEM_MC_IF_MXCSR_XCPT_PENDING() {
5049 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5050 } IEM_MC_ELSE() {
5051 IEM_MC_COMMIT_EFLAGS(fEFlags);
5052 } IEM_MC_ENDIF();
5053
5054 IEM_MC_ADVANCE_RIP_AND_FINISH();
5055 IEM_MC_END();
5056 }
5057}
5058
5059
5060/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
5061FNIEMOP_DEF(iemOp_comisd_Vsd_Wsd)
5062{
5063 IEMOP_MNEMONIC2(RM, COMISD, comisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
5064 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5065 if (IEM_IS_MODRM_REG_MODE(bRm))
5066 {
5067 /*
5068 * Register, register.
5069 */
5070 IEM_MC_BEGIN(4, 1, 0, 0);
5071 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5072 IEM_MC_LOCAL(uint32_t, fEFlags);
5073 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5074 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5075 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5076 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
5077 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5078 IEM_MC_PREPARE_SSE_USAGE();
5079 IEM_MC_FETCH_EFLAGS(fEFlags);
5080 IEM_MC_REF_MXCSR(pfMxcsr);
5081 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5082 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
5083 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5084 IEM_MC_IF_MXCSR_XCPT_PENDING() {
5085 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5086 } IEM_MC_ELSE() {
5087 IEM_MC_COMMIT_EFLAGS(fEFlags);
5088 } IEM_MC_ENDIF();
5089
5090 IEM_MC_ADVANCE_RIP_AND_FINISH();
5091 IEM_MC_END();
5092 }
5093 else
5094 {
5095 /*
5096 * Register, memory.
5097 */
5098 IEM_MC_BEGIN(4, 3, 0, 0);
5099 IEM_MC_LOCAL(uint32_t, fEFlags);
5100 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
5101 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
5102 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
5103 IEM_MC_LOCAL(X86XMMREG, uSrc2);
5104 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
5105 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5106
5107 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5108 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5109 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5110 IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5111
5112 IEM_MC_PREPARE_SSE_USAGE();
5113 IEM_MC_FETCH_EFLAGS(fEFlags);
5114 IEM_MC_REF_MXCSR(pfMxcsr);
5115 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
5116 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_comisd_u128, pfMxcsr, pEFlags, puSrc1, puSrc2);
5117 IEM_MC_IF_MXCSR_XCPT_PENDING() {
5118 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
5119 } IEM_MC_ELSE() {
5120 IEM_MC_COMMIT_EFLAGS(fEFlags);
5121 } IEM_MC_ENDIF();
5122
5123 IEM_MC_ADVANCE_RIP_AND_FINISH();
5124 IEM_MC_END();
5125 }
5126}
5127
5128
5129/* Opcode 0xf3 0x0f 0x2f - invalid */
5130/* Opcode 0xf2 0x0f 0x2f - invalid */
5131
5132/** Opcode 0x0f 0x30. */
5133FNIEMOP_DEF(iemOp_wrmsr)
5134{
5135 IEMOP_MNEMONIC(wrmsr, "wrmsr");
5136 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5137 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_wrmsr);
5138}
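
/*
 * Note: wrmsr and the following MSR/TSC/PMC reads are implemented entirely
 * in C; IEM_MC_DEFER_TO_CIMPL_0_RET() dispatches to the given iemCImpl_*
 * worker, and the IEM_CIMPL_F_VMEXIT flag marks the instruction as one that
 * may trigger a VM exit.
 */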
5139
5140
5141/** Opcode 0x0f 0x31. */
5142FNIEMOP_DEF(iemOp_rdtsc)
5143{
5144 IEMOP_MNEMONIC(rdtsc, "rdtsc");
5145 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5146 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_rdtsc);
5147}
5148
5149
5150/** Opcode 0x0f 0x32. */
5151FNIEMOP_DEF(iemOp_rdmsr)
5152{
5153 IEMOP_MNEMONIC(rdmsr, "rdmsr");
5154 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5155 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_rdmsr);
5156}
5157
5158
5159/** Opcode 0x0f 0x33. */
5160FNIEMOP_DEF(iemOp_rdpmc)
5161{
5162 IEMOP_MNEMONIC(rdpmc, "rdpmc");
5163 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5164 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_rdpmc);
5165}
5166
5167
5168/** Opcode 0x0f 0x34. */
5169FNIEMOP_DEF(iemOp_sysenter)
5170{
5171 IEMOP_MNEMONIC0(FIXED, SYSENTER, sysenter, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
5172 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5173 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
5174 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
5175 iemCImpl_sysenter);
5176}
5177
5178/** Opcode 0x0f 0x35. */
5179FNIEMOP_DEF(iemOp_sysexit)
5180{
5181 IEMOP_MNEMONIC0(FIXED, SYSEXIT, sysexit, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
5182 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5183 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
5184 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
5185 iemCImpl_sysexit, pVCpu->iem.s.enmEffOpSize);
5186}
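/*
 * Note: the 0x30-0x35 instructions above are all deferred to C implementations;
 * IEM_MC_DEFER_TO_CIMPL_n_RET hands them to the iemCImpl_* worker together with
 * flags describing their side effects (possible VM exit, far branch, mode and
 * RFLAGS changes, ending the translation block), presumably so the recompiler
 * knows what guest state to flush around the call.
 */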
5187
5188/** Opcode 0x0f 0x37. */
5189FNIEMOP_STUB(iemOp_getsec);
5190
5191
5192/** Opcode 0x0f 0x38. */
5193FNIEMOP_DEF(iemOp_3byte_Esc_0f_38)
5194{
5195#ifdef IEM_WITH_THREE_0F_38
5196 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
5197 return FNIEMOP_CALL(g_apfnThreeByte0f38[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
5198#else
5199 IEMOP_BITCH_ABOUT_STUB();
5200 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5201#endif
5202}
5203
5204
5205/** Opcode 0x0f 0x3a. */
5206FNIEMOP_DEF(iemOp_3byte_Esc_0f_3a)
5207{
5208#ifdef IEM_WITH_THREE_0F_3A
5209 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
5210 return FNIEMOP_CALL(g_apfnThreeByte0f3a[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
5211#else
5212 IEMOP_BITCH_ABOUT_STUB();
5213 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5214#endif
5215}
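/*
 * Both escape tables are laid out with four entries per opcode byte, one per
 * last-prefix state, so (b * 4 + idxPrefix) picks the handler column for the
 * active prefix (presumably 0 = none, 1 = 0x66, 2 = 0xf3, 3 = 0xf2, matching
 * the group 12-14 jump tables further down).
 */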
5216
5217
5218/**
5219 * Implements a conditional move.
5220 *
5221 * Wish there was an obvious way to do this where we could share and reduce
5222 * code bloat.
5223 *
5224 * @param a_Cnd The conditional "microcode" operation.
5225 */
5226#define CMOV_X(a_Cnd) \
5227 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
5228 if (IEM_IS_MODRM_REG_MODE(bRm)) \
5229 { \
5230 switch (pVCpu->iem.s.enmEffOpSize) \
5231 { \
5232 case IEMMODE_16BIT: \
5233 IEM_MC_BEGIN(0, 1, 0, 0); \
5234 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5235 IEM_MC_LOCAL(uint16_t, u16Tmp); \
5236 a_Cnd { \
5237 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5238 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
5239 } IEM_MC_ENDIF(); \
5240 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5241 IEM_MC_END(); \
5242 break; \
5243 \
5244 case IEMMODE_32BIT: \
5245 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0); \
5246 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5247 IEM_MC_LOCAL(uint32_t, u32Tmp); \
5248 a_Cnd { \
5249 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5250 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
5251 } IEM_MC_ELSE() { \
5252 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
5253 } IEM_MC_ENDIF(); \
5254 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5255 IEM_MC_END(); \
5256 break; \
5257 \
5258 case IEMMODE_64BIT: \
5259 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0); \
5260 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5261 IEM_MC_LOCAL(uint64_t, u64Tmp); \
5262 a_Cnd { \
5263 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5264 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
5265 } IEM_MC_ENDIF(); \
5266 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5267 IEM_MC_END(); \
5268 break; \
5269 \
5270 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5271 } \
5272 } \
5273 else \
5274 { \
5275 switch (pVCpu->iem.s.enmEffOpSize) \
5276 { \
5277 case IEMMODE_16BIT: \
5278 IEM_MC_BEGIN(0, 2, 0, 0); \
5279 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5280 IEM_MC_LOCAL(uint16_t, u16Tmp); \
5281 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5282 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5283 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5284 a_Cnd { \
5285 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); \
5286 } IEM_MC_ENDIF(); \
5287 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5288 IEM_MC_END(); \
5289 break; \
5290 \
5291 case IEMMODE_32BIT: \
5292 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0); \
5293 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5294 IEM_MC_LOCAL(uint32_t, u32Tmp); \
5295 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5296 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5297 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5298 a_Cnd { \
5299 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); \
5300 } IEM_MC_ELSE() { \
5301 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
5302 } IEM_MC_ENDIF(); \
5303 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5304 IEM_MC_END(); \
5305 break; \
5306 \
5307 case IEMMODE_64BIT: \
5308 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0); \
5309 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5310 IEM_MC_LOCAL(uint64_t, u64Tmp); \
5311 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
5312 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5313 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5314 a_Cnd { \
5315 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp); \
5316 } IEM_MC_ENDIF(); \
5317 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5318 IEM_MC_END(); \
5319 break; \
5320 \
5321 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5322 } \
5323 } do {} while (0)
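/*
 * Example (behaviour sketch): in 64-bit mode a 32-bit cmov like
 *     cmovz eax, ebx
 * writes RAX even when ZF is clear, zero-extending into the high dword. That
 * is why only the IEMMODE_32BIT cases above carry an IEM_MC_ELSE() doing
 * IEM_MC_CLEAR_HIGH_GREG_U64; the 16-bit and 64-bit variants have no such
 * side effect to emulate.
 */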
5324
5325
5326
5327/** Opcode 0x0f 0x40. */
5328FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
5329{
5330 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev");
5331 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
5332}
5333
5334
5335/** Opcode 0x0f 0x41. */
5336FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
5337{
5338 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev");
5339 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
5340}
5341
5342
5343/** Opcode 0x0f 0x42. */
5344FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
5345{
5346 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev");
5347 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
5348}
5349
5350
5351/** Opcode 0x0f 0x43. */
5352FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
5353{
5354 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev");
5355 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
5356}
5357
5358
5359/** Opcode 0x0f 0x44. */
5360FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
5361{
5362 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev");
5363 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
5364}
5365
5366
5367/** Opcode 0x0f 0x45. */
5368FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
5369{
5370 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev");
5371 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
5372}
5373
5374
5375/** Opcode 0x0f 0x46. */
5376FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
5377{
5378 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev");
5379 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
5380}
5381
5382
5383/** Opcode 0x0f 0x47. */
5384FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
5385{
5386 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev");
5387 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
5388}
5389
5390
5391/** Opcode 0x0f 0x48. */
5392FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
5393{
5394 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev");
5395 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
5396}
5397
5398
5399/** Opcode 0x0f 0x49. */
5400FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
5401{
5402 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev");
5403 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
5404}
5405
5406
5407/** Opcode 0x0f 0x4a. */
5408FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
5409{
5410 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev");
5411 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
5412}
5413
5414
5415/** Opcode 0x0f 0x4b. */
5416FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
5417{
5418 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev");
5419 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
5420}
5421
5422
5423/** Opcode 0x0f 0x4c. */
5424FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
5425{
5426 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev");
5427 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
5428}
5429
5430
5431/** Opcode 0x0f 0x4d. */
5432FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
5433{
5434 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev");
5435 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
5436}
5437
5438
5439/** Opcode 0x0f 0x4e. */
5440FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
5441{
5442 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev");
5443 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5444}
5445
5446
5447/** Opcode 0x0f 0x4f. */
5448FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
5449{
5450 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev");
5451 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
5452}
5453
5454#undef CMOV_X
5455
5456/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
5457FNIEMOP_DEF(iemOp_movmskps_Gy_Ups)
5458{
5459 IEMOP_MNEMONIC2(RM_REG, MOVMSKPS, movmskps, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /** @todo */
5460 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5461 if (IEM_IS_MODRM_REG_MODE(bRm))
5462 {
5463 /*
5464 * Register, register.
5465 */
5466 IEM_MC_BEGIN(2, 1, 0, 0);
5467 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
5468 IEM_MC_LOCAL(uint8_t, u8Dst);
5469 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5470 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5471 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5472 IEM_MC_PREPARE_SSE_USAGE();
5473 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5474 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskps_u128, pu8Dst, puSrc);
5475 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5476 IEM_MC_ADVANCE_RIP_AND_FINISH();
5477 IEM_MC_END();
5478 }
5479 /* No memory operand. */
5480 else
5481 IEMOP_RAISE_INVALID_OPCODE_RET();
5482}
5483
5484
5485/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
5486FNIEMOP_DEF(iemOp_movmskpd_Gy_Upd)
5487{
5488 IEMOP_MNEMONIC2(RM_REG, MOVMSKPD, movmskpd, Gy, Ux, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /** @todo */
5489 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5490 if (IEM_IS_MODRM_REG_MODE(bRm))
5491 {
5492 /*
5493 * Register, register.
5494 */
5495 IEM_MC_BEGIN(2, 1, 0, 0);
5496 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
5497 IEM_MC_LOCAL(uint8_t, u8Dst);
5498 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
5499 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5500 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
5501 IEM_MC_PREPARE_SSE_USAGE();
5502 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5503 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movmskpd_u128, pu8Dst, puSrc);
5504        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
5505 IEM_MC_ADVANCE_RIP_AND_FINISH();
5506 IEM_MC_END();
5507 }
5508 /* No memory operand. */
5509 else
5510 IEMOP_RAISE_INVALID_OPCODE_RET();
5511
5512}
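/*
 * Note: movmskps gathers the sign bits of the four packed singles into the low
 * four bits of the destination GPR, while movmskpd collects the two double
 * sign bits into bits 0 and 1; the rest of the destination is zeroed, hence
 * the uint8_t worker result being stored via IEM_MC_STORE_GREG_U32.
 */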
5513
5514
5515/* Opcode 0xf3 0x0f 0x50 - invalid */
5516/* Opcode 0xf2 0x0f 0x50 - invalid */
5517
5518
5519/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
5520FNIEMOP_DEF(iemOp_sqrtps_Vps_Wps)
5521{
5522 IEMOP_MNEMONIC2(RM, SQRTPS, sqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5523 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_sqrtps_u128);
5524}
5525
5526
5527/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
5528FNIEMOP_DEF(iemOp_sqrtpd_Vpd_Wpd)
5529{
5530 IEMOP_MNEMONIC2(RM, SQRTPD, sqrtpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5531 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_sqrtpd_u128);
5532}
5533
5534
5535/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
5536FNIEMOP_DEF(iemOp_sqrtss_Vss_Wss)
5537{
5538 IEMOP_MNEMONIC2(RM, SQRTSS, sqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5539 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_sqrtss_u128_r32);
5540}
5541
5542
5543/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
5544FNIEMOP_DEF(iemOp_sqrtsd_Vsd_Wsd)
5545{
5546 IEMOP_MNEMONIC2(RM, SQRTSD, sqrtsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5547 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_sqrtsd_u128_r64);
5548}
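/*
 * The arithmetic wrappers from sqrtps above down to maxsd follow a single
 * pattern: packed forms use the FullFull_To_Full workers (whole 128-bit
 * source), while the scalar ss/sd forms use the FullR32/FullR64 workers,
 * which read only a 32/64-bit source and leave the upper destination bits
 * untouched, matching scalar SSE semantics.
 */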
5549
5550
5551/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
5552FNIEMOP_DEF(iemOp_rsqrtps_Vps_Wps)
5553{
5554 IEMOP_MNEMONIC2(RM, RSQRTPS, rsqrtps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5555 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_rsqrtps_u128);
5556}
5557
5558
5559/* Opcode 0x66 0x0f 0x52 - invalid */
5560
5561
5562/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
5563FNIEMOP_DEF(iemOp_rsqrtss_Vss_Wss)
5564{
5565 IEMOP_MNEMONIC2(RM, RSQRTSS, rsqrtss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5566 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_rsqrtss_u128_r32);
5567}
5568
5569
5570/* Opcode 0xf2 0x0f 0x52 - invalid */
5571
5572/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
5573FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
5574/* Opcode 0x66 0x0f 0x53 - invalid */
5575/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
5576FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
5577/* Opcode 0xf2 0x0f 0x53 - invalid */
5578
5579
5580/** Opcode 0x0f 0x54 - andps Vps, Wps */
5581FNIEMOP_DEF(iemOp_andps_Vps_Wps)
5582{
5583 IEMOP_MNEMONIC2(RM, ANDPS, andps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5584 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pand_u128);
5585}
5586
5587
5588/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
5589FNIEMOP_DEF(iemOp_andpd_Vpd_Wpd)
5590{
5591 IEMOP_MNEMONIC2(RM, ANDPD, andpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5592 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
5593}
5594
5595
5596/* Opcode 0xf3 0x0f 0x54 - invalid */
5597/* Opcode 0xf2 0x0f 0x54 - invalid */
5598
5599
5600/** Opcode 0x0f 0x55 - andnps Vps, Wps */
5601FNIEMOP_DEF(iemOp_andnps_Vps_Wps)
5602{
5603 IEMOP_MNEMONIC2(RM, ANDNPS, andnps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5604 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pandn_u128);
5605}
5606
5607
5608/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
5609FNIEMOP_DEF(iemOp_andnpd_Vpd_Wpd)
5610{
5611 IEMOP_MNEMONIC2(RM, ANDNPD, andnpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5612 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
5613}
5614
5615
5616/* Opcode 0xf3 0x0f 0x55 - invalid */
5617/* Opcode 0xf2 0x0f 0x55 - invalid */
5618
5619
5620/** Opcode 0x0f 0x56 - orps Vps, Wps */
5621FNIEMOP_DEF(iemOp_orps_Vps_Wps)
5622{
5623 IEMOP_MNEMONIC2(RM, ORPS, orps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5624 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_por_u128);
5625}
5626
5627
5628/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
5629FNIEMOP_DEF(iemOp_orpd_Vpd_Wpd)
5630{
5631 IEMOP_MNEMONIC2(RM, ORPD, orpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5632 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
5633}
5634
5635
5636/* Opcode 0xf3 0x0f 0x56 - invalid */
5637/* Opcode 0xf2 0x0f 0x56 - invalid */
5638
5639
5640/** Opcode 0x0f 0x57 - xorps Vps, Wps */
5641FNIEMOP_DEF(iemOp_xorps_Vps_Wps)
5642{
5643 IEMOP_MNEMONIC2(RM, XORPS, xorps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5644 return FNIEMOP_CALL_1(iemOpCommonSse_FullFull_To_Full, iemAImpl_pxor_u128);
5645}
5646
5647
5648/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
5649FNIEMOP_DEF(iemOp_xorpd_Vpd_Wpd)
5650{
5651 IEMOP_MNEMONIC2(RM, XORPD, xorpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5652 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
5653}
5654
5655
5656/* Opcode 0xf3 0x0f 0x57 - invalid */
5657/* Opcode 0xf2 0x0f 0x57 - invalid */
5658
5659/** Opcode 0x0f 0x58 - addps Vps, Wps */
5660FNIEMOP_DEF(iemOp_addps_Vps_Wps)
5661{
5662 IEMOP_MNEMONIC2(RM, ADDPS, addps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5663 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_addps_u128);
5664}
5665
5666
5667/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
5668FNIEMOP_DEF(iemOp_addpd_Vpd_Wpd)
5669{
5670 IEMOP_MNEMONIC2(RM, ADDPD, addpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5671 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_addpd_u128);
5672}
5673
5674
5675/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
5676FNIEMOP_DEF(iemOp_addss_Vss_Wss)
5677{
5678 IEMOP_MNEMONIC2(RM, ADDSS, addss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5679 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_addss_u128_r32);
5680}
5681
5682
5683/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
5684FNIEMOP_DEF(iemOp_addsd_Vsd_Wsd)
5685{
5686 IEMOP_MNEMONIC2(RM, ADDSD, addsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5687 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_addsd_u128_r64);
5688}
5689
5690
5691/** Opcode 0x0f 0x59 - mulps Vps, Wps */
5692FNIEMOP_DEF(iemOp_mulps_Vps_Wps)
5693{
5694 IEMOP_MNEMONIC2(RM, MULPS, mulps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5695 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_mulps_u128);
5696}
5697
5698
5699/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
5700FNIEMOP_DEF(iemOp_mulpd_Vpd_Wpd)
5701{
5702 IEMOP_MNEMONIC2(RM, MULPD, mulpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5703 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_mulpd_u128);
5704}
5705
5706
5707/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
5708FNIEMOP_DEF(iemOp_mulss_Vss_Wss)
5709{
5710 IEMOP_MNEMONIC2(RM, MULSS, mulss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5711 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_mulss_u128_r32);
5712}
5713
5714
5715/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
5716FNIEMOP_DEF(iemOp_mulsd_Vsd_Wsd)
5717{
5718 IEMOP_MNEMONIC2(RM, MULSD, mulsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5719 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_mulsd_u128_r64);
5720}
5721
5722
5723/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
5724FNIEMOP_DEF(iemOp_cvtps2pd_Vpd_Wps)
5725{
5726 IEMOP_MNEMONIC2(RM, CVTPS2PD, cvtps2pd, Vpd, Wps, DISOPTYPE_HARMLESS, 0);
5727 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2pd_u128);
5728}
5729
5730
5731/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
5732FNIEMOP_DEF(iemOp_cvtpd2ps_Vps_Wpd)
5733{
5734 IEMOP_MNEMONIC2(RM, CVTPD2PS, cvtpd2ps, Vps, Wpd, DISOPTYPE_HARMLESS, 0);
5735 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2ps_u128);
5736}
5737
5738
5739/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
5740FNIEMOP_DEF(iemOp_cvtss2sd_Vsd_Wss)
5741{
5742 IEMOP_MNEMONIC2(RM, CVTSS2SD, cvtss2sd, Vsd, Wss, DISOPTYPE_HARMLESS, 0);
5743 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_cvtss2sd_u128_r32);
5744}
5745
5746
5747/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
5748FNIEMOP_DEF(iemOp_cvtsd2ss_Vss_Wsd)
5749{
5750 IEMOP_MNEMONIC2(RM, CVTSD2SS, cvtsd2ss, Vss, Wsd, DISOPTYPE_HARMLESS, 0);
5751 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_cvtsd2ss_u128_r64);
5752}
5753
5754
5755/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
5756FNIEMOP_DEF(iemOp_cvtdq2ps_Vps_Wdq)
5757{
5758 IEMOP_MNEMONIC2(RM, CVTDQ2PS, cvtdq2ps, Vps, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5759 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2ps_u128);
5760}
5761
5762
5763/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
5764FNIEMOP_DEF(iemOp_cvtps2dq_Vdq_Wps)
5765{
5766 IEMOP_MNEMONIC2(RM, CVTPS2DQ, cvtps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5767 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtps2dq_u128);
5768}
5769
5770
5771/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
5772FNIEMOP_DEF(iemOp_cvttps2dq_Vdq_Wps)
5773{
5774 IEMOP_MNEMONIC2(RM, CVTTPS2DQ, cvttps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5775 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttps2dq_u128);
5776}
5777
5778
5779/* Opcode 0xf2 0x0f 0x5b - invalid */
5780
5781
5782/** Opcode 0x0f 0x5c - subps Vps, Wps */
5783FNIEMOP_DEF(iemOp_subps_Vps_Wps)
5784{
5785 IEMOP_MNEMONIC2(RM, SUBPS, subps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5786 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_subps_u128);
5787}
5788
5789
5790/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
5791FNIEMOP_DEF(iemOp_subpd_Vpd_Wpd)
5792{
5793 IEMOP_MNEMONIC2(RM, SUBPD, subpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5794 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_subpd_u128);
5795}
5796
5797
5798/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
5799FNIEMOP_DEF(iemOp_subss_Vss_Wss)
5800{
5801 IEMOP_MNEMONIC2(RM, SUBSS, subss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5802 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_subss_u128_r32);
5803}
5804
5805
5806/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
5807FNIEMOP_DEF(iemOp_subsd_Vsd_Wsd)
5808{
5809 IEMOP_MNEMONIC2(RM, SUBSD, subsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5810 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_subsd_u128_r64);
5811}
5812
5813
5814/** Opcode 0x0f 0x5d - minps Vps, Wps */
5815FNIEMOP_DEF(iemOp_minps_Vps_Wps)
5816{
5817 IEMOP_MNEMONIC2(RM, MINPS, minps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5818 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_minps_u128);
5819}
5820
5821
5822/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
5823FNIEMOP_DEF(iemOp_minpd_Vpd_Wpd)
5824{
5825 IEMOP_MNEMONIC2(RM, MINPD, minpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5826 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_minpd_u128);
5827}
5828
5829
5830/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
5831FNIEMOP_DEF(iemOp_minss_Vss_Wss)
5832{
5833 IEMOP_MNEMONIC2(RM, MINSS, minss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5834 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_minss_u128_r32);
5835}
5836
5837
5838/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
5839FNIEMOP_DEF(iemOp_minsd_Vsd_Wsd)
5840{
5841 IEMOP_MNEMONIC2(RM, MINSD, minsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5842 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_minsd_u128_r64);
5843}
5844
5845
5846/** Opcode 0x0f 0x5e - divps Vps, Wps */
5847FNIEMOP_DEF(iemOp_divps_Vps_Wps)
5848{
5849 IEMOP_MNEMONIC2(RM, DIVPS, divps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5850 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_divps_u128);
5851}
5852
5853
5854/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
5855FNIEMOP_DEF(iemOp_divpd_Vpd_Wpd)
5856{
5857 IEMOP_MNEMONIC2(RM, DIVPD, divpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5858 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_divpd_u128);
5859}
5860
5861
5862/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
5863FNIEMOP_DEF(iemOp_divss_Vss_Wss)
5864{
5865 IEMOP_MNEMONIC2(RM, DIVSS, divss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5866 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_divss_u128_r32);
5867}
5868
5869
5870/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
5871FNIEMOP_DEF(iemOp_divsd_Vsd_Wsd)
5872{
5873 IEMOP_MNEMONIC2(RM, DIVSD, divsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5874 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_divsd_u128_r64);
5875}
5876
5877
5878/** Opcode 0x0f 0x5f - maxps Vps, Wps */
5879FNIEMOP_DEF(iemOp_maxps_Vps_Wps)
5880{
5881 IEMOP_MNEMONIC2(RM, MAXPS, maxps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
5882 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullFull_To_Full, iemAImpl_maxps_u128);
5883}
5884
5885
5886/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
5887FNIEMOP_DEF(iemOp_maxpd_Vpd_Wpd)
5888{
5889 IEMOP_MNEMONIC2(RM, MAXPD, maxpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
5890 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_maxpd_u128);
5891}
5892
5893
5894/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
5895FNIEMOP_DEF(iemOp_maxss_Vss_Wss)
5896{
5897 IEMOP_MNEMONIC2(RM, MAXSS, maxss, Vss, Wss, DISOPTYPE_HARMLESS, 0);
5898 return FNIEMOP_CALL_1(iemOpCommonSseFp_FullR32_To_Full, iemAImpl_maxss_u128_r32);
5899}
5900
5901
5902/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
5903FNIEMOP_DEF(iemOp_maxsd_Vsd_Wsd)
5904{
5905 IEMOP_MNEMONIC2(RM, MAXSD, maxsd, Vsd, Wsd, DISOPTYPE_HARMLESS, 0);
5906 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullR64_To_Full, iemAImpl_maxsd_u128_r64);
5907}
5908
5909
5910/** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */
5911FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd)
5912{
5913 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5914 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklbw_u64);
5915}
5916
5917
5918/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, Wx */
5919FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
5920{
5921 IEMOP_MNEMONIC2(RM, PUNPCKLBW, punpcklbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5922 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklbw_u128);
5923}
5924
5925
5926/* Opcode 0xf3 0x0f 0x60 - invalid */
5927
5928
5929/** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */
5930FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd)
5931{
5932    /** @todo AMD marks the MMX version as 3DNow!; Intel says MMX CPUID req. */
5933 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5934 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpcklwd_u64);
5935}
5936
5937
5938/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
5939FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
5940{
5941 IEMOP_MNEMONIC2(RM, PUNPCKLWD, punpcklwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5942 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklwd_u128);
5943}
5944
5945
5946/* Opcode 0xf3 0x0f 0x61 - invalid */
5947
5948
5949/** Opcode 0x0f 0x62 - punpckldq Pq, Qd */
5950FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd)
5951{
5952 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5953 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, iemAImpl_punpckldq_u64);
5954}
5955
5956
5957/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
5958FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
5959{
5960 IEMOP_MNEMONIC2(RM, PUNPCKLDQ, punpckldq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5961 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpckldq_u128);
5962}
5963
5964
5965/* Opcode 0xf3 0x0f 0x62 - invalid */
5966
5967
5968
5969/** Opcode 0x0f 0x63 - packsswb Pq, Qq */
5970FNIEMOP_DEF(iemOp_packsswb_Pq_Qq)
5971{
5972 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
5973 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packsswb_u64);
5974}
5975
5976
5977/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
5978FNIEMOP_DEF(iemOp_packsswb_Vx_Wx)
5979{
5980 IEMOP_MNEMONIC2(RM, PACKSSWB, packsswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
5981 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packsswb_u128);
5982}
5983
5984
5985/* Opcode 0xf3 0x0f 0x63 - invalid */
5986
5987
5988/** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
5989FNIEMOP_DEF(iemOp_pcmpgtb_Pq_Qq)
5990{
5991 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
5992 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtb_u64);
5993}
5994
5995
5996/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
5997FNIEMOP_DEF(iemOp_pcmpgtb_Vx_Wx)
5998{
5999 IEMOP_MNEMONIC2(RM, PCMPGTB, pcmpgtb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6000 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtb_u128);
6001}
6002
6003
6004/* Opcode 0xf3 0x0f 0x64 - invalid */
6005
6006
6007/** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
6008FNIEMOP_DEF(iemOp_pcmpgtw_Pq_Qq)
6009{
6010 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6011 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtw_u64);
6012}
6013
6014
6015/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
6016FNIEMOP_DEF(iemOp_pcmpgtw_Vx_Wx)
6017{
6018 IEMOP_MNEMONIC2(RM, PCMPGTW, pcmpgtw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6019 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtw_u128);
6020}
6021
6022
6023/* Opcode 0xf3 0x0f 0x65 - invalid */
6024
6025
6026/** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
6027FNIEMOP_DEF(iemOp_pcmpgtd_Pq_Qq)
6028{
6029 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6030 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpgtd_u64);
6031}
6032
6033
6034/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
6035FNIEMOP_DEF(iemOp_pcmpgtd_Vx_Wx)
6036{
6037 IEMOP_MNEMONIC2(RM, PCMPGTD, pcmpgtd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6038 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpgtd_u128);
6039}
6040
6041
6042/* Opcode 0xf3 0x0f 0x66 - invalid */
6043
6044
6045/** Opcode 0x0f 0x67 - packuswb Pq, Qq */
6046FNIEMOP_DEF(iemOp_packuswb_Pq_Qq)
6047{
6048 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6049 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packuswb_u64);
6050}
6051
6052
6053/** Opcode 0x66 0x0f 0x67 - packuswb Vx, Wx */
6054FNIEMOP_DEF(iemOp_packuswb_Vx_Wx)
6055{
6056 IEMOP_MNEMONIC2(RM, PACKUSWB, packuswb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6057 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packuswb_u128);
6058}
6059
6060
6061/* Opcode 0xf3 0x0f 0x67 - invalid */
6062
6063
6064/** Opcode 0x0f 0x68 - punpckhbw Pq, Qq
 6065 * @note Intel and AMD both use Qd for the second parameter; however, they
 6066 * both list it as an mmX/mem64 operand and Intel describes it as being
 6067 * loaded as a qword, so it should be Qq, shouldn't it? */
6068FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq)
6069{
6070 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6071 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhbw_u64);
6072}
6073
6074
6075/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
6076FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
6077{
6078 IEMOP_MNEMONIC2(RM, PUNPCKHBW, punpckhbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6079 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhbw_u128);
6080}
6081
6082
6083/* Opcode 0xf3 0x0f 0x68 - invalid */
6084
6085
6086/** Opcode 0x0f 0x69 - punpckhwd Pq, Qq
 6087 * @note Intel and AMD both use Qd for the second parameter; however, they
 6088 * both list it as an mmX/mem64 operand and Intel describes it as being
 6089 * loaded as a qword, so it should be Qq, shouldn't it? */
6090FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qq)
6091{
6092 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6093 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhwd_u64);
6094}
6095
6096
6097/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Wx */
6098FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
6099{
6100 IEMOP_MNEMONIC2(RM, PUNPCKHWD, punpckhwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6101 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhwd_u128);
6102
6103}
6104
6105
6106/* Opcode 0xf3 0x0f 0x69 - invalid */
6107
6108
6109/** Opcode 0x0f 0x6a - punpckhdq Pq, Qq
 6110 * @note Intel and AMD both use Qd for the second parameter; however, they
 6111 * both list it as an mmX/mem64 operand and Intel describes it as being
 6112 * loaded as a qword, so it should be Qq, shouldn't it? */
6113FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qq)
6114{
6115 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6116 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, iemAImpl_punpckhdq_u64);
6117}
6118
6119
6120/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, Wx */
6121FNIEMOP_DEF(iemOp_punpckhdq_Vx_Wx)
6122{
6123 IEMOP_MNEMONIC2(RM, PUNPCKHDQ, punpckhdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6124 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhdq_u128);
6125}
6126
6127
6128/* Opcode 0xf3 0x0f 0x6a - invalid */
6129
6130
6131/** Opcode 0x0f 0x6b - packssdw Pq, Qd */
6132FNIEMOP_DEF(iemOp_packssdw_Pq_Qd)
6133{
6134 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Pq, Qd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6135 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_packssdw_u64);
6136}
6137
6138
6139/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
6140FNIEMOP_DEF(iemOp_packssdw_Vx_Wx)
6141{
6142 IEMOP_MNEMONIC2(RM, PACKSSDW, packssdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6143 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_packssdw_u128);
6144}
6145
6146
6147/* Opcode 0xf3 0x0f 0x6b - invalid */
6148
6149
6150/* Opcode 0x0f 0x6c - invalid */
6151
6152
6153/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
6154FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
6155{
6156 IEMOP_MNEMONIC2(RM, PUNPCKLQDQ, punpcklqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6157 return FNIEMOP_CALL_1(iemOpCommonSse2_LowLow_To_Full, iemAImpl_punpcklqdq_u128);
6158}
6159
6160
6161/* Opcode 0xf3 0x0f 0x6c - invalid */
6162/* Opcode 0xf2 0x0f 0x6c - invalid */
6163
6164
6165/* Opcode 0x0f 0x6d - invalid */
6166
6167
6168/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, Wx */
6169FNIEMOP_DEF(iemOp_punpckhqdq_Vx_Wx)
6170{
6171 IEMOP_MNEMONIC2(RM, PUNPCKHQDQ, punpckhqdq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6172 return FNIEMOP_CALL_1(iemOpCommonSse2_HighHigh_To_Full, iemAImpl_punpckhqdq_u128);
6173}
6174
6175
6176/* Opcode 0xf3 0x0f 0x6d - invalid */
6177
6178
6179FNIEMOP_DEF(iemOp_movd_q_Pd_Ey)
6180{
6181 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6182 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6183 {
6184 /**
6185 * @opcode 0x6e
6186 * @opcodesub rex.w=1
6187 * @oppfx none
6188 * @opcpuid mmx
6189 * @opgroup og_mmx_datamove
6190 * @opxcpttype 5
6191 * @optest 64-bit / op1=1 op2=2 -> op1=2 ftw=0xff
6192 * @optest 64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
6193 */
6194 IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
6195 if (IEM_IS_MODRM_REG_MODE(bRm))
6196 {
6197 /* MMX, greg64 */
6198 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
6199 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6200 IEM_MC_LOCAL(uint64_t, u64Tmp);
6201
6202 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6203 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6204 IEM_MC_FPU_TO_MMX_MODE();
6205
6206 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6207 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6208
6209 IEM_MC_ADVANCE_RIP_AND_FINISH();
6210 IEM_MC_END();
6211 }
6212 else
6213 {
6214 /* MMX, [mem64] */
6215 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
6216 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6217 IEM_MC_LOCAL(uint64_t, u64Tmp);
6218
6219 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6220 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6221 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6222 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6223 IEM_MC_FPU_TO_MMX_MODE();
6224
6225 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6226 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6227
6228 IEM_MC_ADVANCE_RIP_AND_FINISH();
6229 IEM_MC_END();
6230 }
6231 }
6232 else
6233 {
6234 /**
6235 * @opdone
6236 * @opcode 0x6e
6237 * @opcodesub rex.w=0
6238 * @oppfx none
6239 * @opcpuid mmx
6240 * @opgroup og_mmx_datamove
6241 * @opxcpttype 5
6242 * @opfunction iemOp_movd_q_Pd_Ey
6243 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6244 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6245 */
6246 IEMOP_MNEMONIC2(RM, MOVD, movd, PdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
6247 if (IEM_IS_MODRM_REG_MODE(bRm))
6248 {
6249 /* MMX, greg32 */
6250 IEM_MC_BEGIN(0, 1, 0, 0);
6251 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6252 IEM_MC_LOCAL(uint32_t, u32Tmp);
6253
6254 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6255 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6256 IEM_MC_FPU_TO_MMX_MODE();
6257
6258 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6259 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
6260
6261 IEM_MC_ADVANCE_RIP_AND_FINISH();
6262 IEM_MC_END();
6263 }
6264 else
6265 {
6266 /* MMX, [mem32] */
6267 IEM_MC_BEGIN(0, 2, 0, 0);
6268 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6269 IEM_MC_LOCAL(uint32_t, u32Tmp);
6270
6271 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6272 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6273 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6274 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6275 IEM_MC_FPU_TO_MMX_MODE();
6276
6277 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6278 IEM_MC_STORE_MREG_U32_ZX_U64(IEM_GET_MODRM_REG_8(bRm), u32Tmp);
6279
6280 IEM_MC_ADVANCE_RIP_AND_FINISH();
6281 IEM_MC_END();
6282 }
6283 }
6284}
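/*
 * Note: like every MMX instruction, these stores switch the x87 unit into MMX
 * mode (IEM_MC_FPU_TO_MMX_MODE), i.e. all tags are set to valid and TOS is
 * zeroed; the '@optest ... ftw=0xff' lines above check exactly that side
 * effect.
 */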
6285
6286FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
6287{
6288 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6289 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
6290 {
6291 /**
6292 * @opcode 0x6e
6293 * @opcodesub rex.w=1
6294 * @oppfx 0x66
6295 * @opcpuid sse2
6296 * @opgroup og_sse2_simdint_datamove
6297 * @opxcpttype 5
6298 * @optest 64-bit / op1=1 op2=2 -> op1=2
6299 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
6300 */
6301 IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
6302 if (IEM_IS_MODRM_REG_MODE(bRm))
6303 {
6304 /* XMM, greg64 */
6305 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
6306 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6307 IEM_MC_LOCAL(uint64_t, u64Tmp);
6308
6309 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6310 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6311
6312 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6313 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
6314
6315 IEM_MC_ADVANCE_RIP_AND_FINISH();
6316 IEM_MC_END();
6317 }
6318 else
6319 {
6320 /* XMM, [mem64] */
6321 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
6322 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6323 IEM_MC_LOCAL(uint64_t, u64Tmp);
6324
6325 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6326 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6327 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6328 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6329
6330 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6331 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
6332
6333 IEM_MC_ADVANCE_RIP_AND_FINISH();
6334 IEM_MC_END();
6335 }
6336 }
6337 else
6338 {
6339 /**
6340 * @opdone
6341 * @opcode 0x6e
6342 * @opcodesub rex.w=0
6343 * @oppfx 0x66
6344 * @opcpuid sse2
6345 * @opgroup og_sse2_simdint_datamove
6346 * @opxcpttype 5
6347 * @opfunction iemOp_movd_q_Vy_Ey
6348 * @optest op1=1 op2=2 -> op1=2
6349 * @optest op1=0 op2=-42 -> op1=-42
6350 */
6351 IEMOP_MNEMONIC2(RM, MOVD, movd, VdZx_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
6352 if (IEM_IS_MODRM_REG_MODE(bRm))
6353 {
6354 /* XMM, greg32 */
6355 IEM_MC_BEGIN(0, 1, 0, 0);
6356 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6357 IEM_MC_LOCAL(uint32_t, u32Tmp);
6358
6359 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6360 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6361
6362 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
6363 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
6364
6365 IEM_MC_ADVANCE_RIP_AND_FINISH();
6366 IEM_MC_END();
6367 }
6368 else
6369 {
6370 /* XMM, [mem32] */
6371 IEM_MC_BEGIN(0, 2, 0, 0);
6372 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6373 IEM_MC_LOCAL(uint32_t, u32Tmp);
6374
6375 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6376 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6377 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6378 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6379
6380 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6381 IEM_MC_STORE_XREG_U32_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
6382
6383 IEM_MC_ADVANCE_RIP_AND_FINISH();
6384 IEM_MC_END();
6385 }
6386 }
6387}
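/*
 * Note: the VdZx_WO/VqZx_WO operand annotations spell out the semantics: the
 * 32/64-bit value is written zero-extended to the full 128-bit XMM register,
 * which is what IEM_MC_STORE_XREG_U32_ZX_U128 / _U64_ZX_U128 implement.
 */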
6388
6389/* Opcode 0xf3 0x0f 0x6e - invalid */
6390
6391
6392/**
6393 * @opcode 0x6f
6394 * @oppfx none
6395 * @opcpuid mmx
6396 * @opgroup og_mmx_datamove
6397 * @opxcpttype 5
6398 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
6399 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
6400 */
6401FNIEMOP_DEF(iemOp_movq_Pq_Qq)
6402{
6403    IEMOP_MNEMONIC2(RM, MOVQ, movq, Pq_WO, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
6404 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6405 if (IEM_IS_MODRM_REG_MODE(bRm))
6406 {
6407 /*
6408 * Register, register.
6409 */
6410 IEM_MC_BEGIN(0, 1, 0, 0);
6411 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6412 IEM_MC_LOCAL(uint64_t, u64Tmp);
6413
6414 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6415 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6416 IEM_MC_FPU_TO_MMX_MODE();
6417
6418 IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_RM_8(bRm));
6419 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6420
6421 IEM_MC_ADVANCE_RIP_AND_FINISH();
6422 IEM_MC_END();
6423 }
6424 else
6425 {
6426 /*
6427 * Register, memory.
6428 */
6429 IEM_MC_BEGIN(0, 2, 0, 0);
6430 IEM_MC_LOCAL(uint64_t, u64Tmp);
6431 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6432
6433 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6434 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6435 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6436 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6437 IEM_MC_FPU_TO_MMX_MODE();
6438
6439 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6440 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), u64Tmp);
6441
6442 IEM_MC_ADVANCE_RIP_AND_FINISH();
6443 IEM_MC_END();
6444 }
6445}
6446
6447/**
6448 * @opcode 0x6f
6449 * @oppfx 0x66
6450 * @opcpuid sse2
6451 * @opgroup og_sse2_simdint_datamove
6452 * @opxcpttype 1
6453 * @optest op1=1 op2=2 -> op1=2
6454 * @optest op1=0 op2=-42 -> op1=-42
6455 */
6456FNIEMOP_DEF(iemOp_movdqa_Vdq_Wdq)
6457{
6458 IEMOP_MNEMONIC2(RM, MOVDQA, movdqa, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6459 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6460 if (IEM_IS_MODRM_REG_MODE(bRm))
6461 {
6462 /*
6463 * Register, register.
6464 */
6465 IEM_MC_BEGIN(0, 0, 0, 0);
6466 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6467
6468 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6469 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6470
6471 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6472 IEM_GET_MODRM_RM(pVCpu, bRm));
6473 IEM_MC_ADVANCE_RIP_AND_FINISH();
6474 IEM_MC_END();
6475 }
6476 else
6477 {
6478 /*
6479 * Register, memory.
6480 */
6481 IEM_MC_BEGIN(0, 2, 0, 0);
6482 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6483 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6484
6485 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6486 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6487 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6488 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6489
6490 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6491 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6492
6493 IEM_MC_ADVANCE_RIP_AND_FINISH();
6494 IEM_MC_END();
6495 }
6496}
6497
6498/**
6499 * @opcode 0x6f
6500 * @oppfx 0xf3
6501 * @opcpuid sse2
6502 * @opgroup og_sse2_simdint_datamove
6503 * @opxcpttype 4UA
6504 * @optest op1=1 op2=2 -> op1=2
6505 * @optest op1=0 op2=-42 -> op1=-42
6506 */
6507FNIEMOP_DEF(iemOp_movdqu_Vdq_Wdq)
6508{
6509 IEMOP_MNEMONIC2(RM, MOVDQU, movdqu, Vdq_WO, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
6510 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6511 if (IEM_IS_MODRM_REG_MODE(bRm))
6512 {
6513 /*
6514 * Register, register.
6515 */
6516 IEM_MC_BEGIN(0, 0, 0, 0);
6517 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6518 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6519 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6520 IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm),
6521 IEM_GET_MODRM_RM(pVCpu, bRm));
6522 IEM_MC_ADVANCE_RIP_AND_FINISH();
6523 IEM_MC_END();
6524 }
6525 else
6526 {
6527 /*
6528 * Register, memory.
6529 */
6530 IEM_MC_BEGIN(0, 2, 0, 0);
6531 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
6532 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6533
6534 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6535 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6536 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6537 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
6538 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6539 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
6540
6541 IEM_MC_ADVANCE_RIP_AND_FINISH();
6542 IEM_MC_END();
6543 }
6544}
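/*
 * Note: the only difference from movdqa above is the memory fetch: movdqa uses
 * IEM_MC_FETCH_MEM_U128_ALIGN_SSE and so faults on a misaligned 16-byte
 * operand, whereas movdqu uses the unaligned IEM_MC_FETCH_MEM_U128.
 */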
6545
6546
6547/** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */
6548FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib)
6549{
6550 IEMOP_MNEMONIC3(RMI, PSHUFW, pshufw, Pq, Qq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6551 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6552 if (IEM_IS_MODRM_REG_MODE(bRm))
6553 {
6554 /*
6555 * Register, register.
6556 */
6557 IEM_MC_BEGIN(3, 0, 0, 0);
6558 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6559 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
6560 IEM_MC_ARG(uint64_t *, pDst, 0);
6561 IEM_MC_ARG(uint64_t const *, pSrc, 1);
6562 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6563 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6564 IEM_MC_PREPARE_FPU_USAGE();
6565 IEM_MC_FPU_TO_MMX_MODE();
6566
6567 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6568 IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
6569 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
6570 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6571
6572 IEM_MC_ADVANCE_RIP_AND_FINISH();
6573 IEM_MC_END();
6574 }
6575 else
6576 {
6577 /*
6578 * Register, memory.
6579 */
6580 IEM_MC_BEGIN(3, 2, 0, 0);
6581 IEM_MC_ARG(uint64_t *, pDst, 0);
6582 IEM_MC_LOCAL(uint64_t, uSrc);
6583 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
6584 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6585
6586 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
6587 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6588 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6589 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
6590 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6591 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6592
6593 IEM_MC_PREPARE_FPU_USAGE();
6594 IEM_MC_FPU_TO_MMX_MODE();
6595
6596 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
6597 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pshufw_u64, pDst, pSrc, bImmArg);
6598 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6599
6600 IEM_MC_ADVANCE_RIP_AND_FINISH();
6601 IEM_MC_END();
6602 }
6603}
6604
6605
6606/**
6607 * Common worker for SSE2 instructions on the forms:
6608 * pshufd xmm1, xmm2/mem128, imm8
6609 * pshufhw xmm1, xmm2/mem128, imm8
6610 * pshuflw xmm1, xmm2/mem128, imm8
6611 *
6612 * Proper alignment of the 128-bit operand is enforced.
6613 * Exceptions type 4. SSE2 cpuid checks.
6614 */
6615FNIEMOP_DEF_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnWorker)
6616{
6617 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6618 if (IEM_IS_MODRM_REG_MODE(bRm))
6619 {
6620 /*
6621 * Register, register.
6622 */
6623 IEM_MC_BEGIN(3, 0, 0, 0);
6624 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6625 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6626 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6627 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
6628 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6629 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6630 IEM_MC_PREPARE_SSE_USAGE();
6631 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6632 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
6633 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);
6634 IEM_MC_ADVANCE_RIP_AND_FINISH();
6635 IEM_MC_END();
6636 }
6637 else
6638 {
6639 /*
6640 * Register, memory.
6641 */
6642 IEM_MC_BEGIN(3, 2, 0, 0);
6643 IEM_MC_ARG(PRTUINT128U, puDst, 0);
6644 IEM_MC_LOCAL(RTUINT128U, uSrc);
6645 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
6646 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6647
6648 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
6649 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6650 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
6651 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6652 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6653
6654 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6655 IEM_MC_PREPARE_SSE_USAGE();
6656 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6657 IEM_MC_CALL_VOID_AIMPL_3(pfnWorker, puDst, puSrc, bImmArg);
6658
6659 IEM_MC_ADVANCE_RIP_AND_FINISH();
6660 IEM_MC_END();
6661 }
6662}
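/*
 * Illustration (a sketch, not the actual worker): pshufd picks source dwords
 * by 2-bit imm8 fields, roughly
 *
 *     for (unsigned i = 0; i < 4; i++)
 *         puDst->au32[i] = puSrc->au32[(bImmArg >> (i * 2)) & 3];
 *
 * except that the real worker must also cope with puDst == puSrc aliasing via
 * a temporary. pshufhw/pshuflw do the same on only the high/low four words,
 * copying the other half through unchanged.
 */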
6663
6664
6665/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
6666FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
6667{
6668 IEMOP_MNEMONIC3(RMI, PSHUFD, pshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6669 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128);
6670}
6671
6672
6673/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
6674FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
6675{
6676 IEMOP_MNEMONIC3(RMI, PSHUFHW, pshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6677 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128);
6678}
6679
6680
6681/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
6682FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
6683{
6684 IEMOP_MNEMONIC3(RMI, PSHUFLW, pshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6685 return FNIEMOP_CALL_1(iemOpCommonSse2_pshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128);
6686}
6687
6688
6689/**
6690 * Common worker for MMX instructions of the form:
6691 * psrlw mm, imm8
6692 * psraw mm, imm8
6693 * psllw mm, imm8
6694 * psrld mm, imm8
6695 * psrad mm, imm8
6696 * pslld mm, imm8
6697 * psrlq mm, imm8
6698 * psllq mm, imm8
6699 *
6700 */
6701FNIEMOP_DEF_2(iemOpCommonMmx_Shift_Imm, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHIFTU64, pfnU64)
6702{
6703 if (IEM_IS_MODRM_REG_MODE(bRm))
6704 {
6705 /*
6706 * Register, immediate.
6707 */
6708 IEM_MC_BEGIN(2, 0, 0, 0);
6709 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6710 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
6711 IEM_MC_ARG(uint64_t *, pDst, 0);
6712 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6713 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
6714 IEM_MC_PREPARE_FPU_USAGE();
6715 IEM_MC_FPU_TO_MMX_MODE();
6716
6717 IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_RM_8(bRm));
6718 IEM_MC_CALL_VOID_AIMPL_2(pfnU64, pDst, bShiftArg);
6719 IEM_MC_MODIFIED_MREG_BY_REF(pDst);
6720
6721 IEM_MC_ADVANCE_RIP_AND_FINISH();
6722 IEM_MC_END();
6723 }
6724 else
6725 {
6726 /*
6727 * Register, memory not supported.
6728 */
6729 /// @todo Caller already enforced register mode?!
6730 AssertFailedReturn(VINF_SUCCESS);
6731 }
6732}
6733
6734
6735/**
6736 * Common worker for SSE2 instructions of the form:
6737 * psrlw xmm, imm8
6738 * psraw xmm, imm8
6739 * psllw xmm, imm8
6740 * psrld xmm, imm8
6741 * psrad xmm, imm8
6742 * pslld xmm, imm8
6743 * psrlq xmm, imm8
6744 * psllq xmm, imm8
6745 *
6746 */
6747FNIEMOP_DEF_2(iemOpCommonSse2_Shift_Imm, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHIFTU128, pfnU128)
6748{
6749 if (IEM_IS_MODRM_REG_MODE(bRm))
6750 {
6751 /*
6752 * Register, immediate.
6753 */
6754 IEM_MC_BEGIN(2, 0, 0, 0);
6755 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6756 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
6757 IEM_MC_ARG(PRTUINT128U, pDst, 0);
6758 IEM_MC_ARG_CONST(uint8_t, bShiftArg, /*=*/ bImm, 1);
6759 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
6760 IEM_MC_PREPARE_SSE_USAGE();
6761 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_RM(pVCpu, bRm));
6762 IEM_MC_CALL_VOID_AIMPL_2(pfnU128, pDst, bShiftArg);
6763 IEM_MC_ADVANCE_RIP_AND_FINISH();
6764 IEM_MC_END();
6765 }
6766 else
6767 {
6768 /*
6769         * Register, memory not supported.
6770 */
6771 /// @todo Caller already enforced register mode?!
6772 AssertFailedReturn(VINF_SUCCESS);
6773 }
6774}
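/*
 * Note: both shift-by-immediate workers implement only the register encoding;
 * memory forms never reach them because the group 12/13/14 dispatchers below
 * route mod != 3 encodings to iemOp_InvalidWithRMNeedImm8, hence the
 * AssertFailedReturn in the else branches above.
 */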
6775
6776
6777/** Opcode 0x0f 0x71 11/2 - psrlw Nq, Ib */
6778FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Nq_Ib)
6779{
6780// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6781 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlw_imm_u64);
6782}
6783
6784
6785/** Opcode 0x66 0x0f 0x71 11/2. */
6786FNIEMOPRM_DEF(iemOp_Grp12_psrlw_Ux_Ib)
6787{
6788// IEMOP_MNEMONIC2(RI, PSRLW, psrlw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6789 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlw_imm_u128);
6790}
6791
6792
6793/** Opcode 0x0f 0x71 11/4. */
6794FNIEMOPRM_DEF(iemOp_Grp12_psraw_Nq_Ib)
6795{
6796// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6797 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psraw_imm_u64);
6798}
6799
6800
6801/** Opcode 0x66 0x0f 0x71 11/4. */
6802FNIEMOPRM_DEF(iemOp_Grp12_psraw_Ux_Ib)
6803{
6804// IEMOP_MNEMONIC2(RI, PSRAW, psraw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6805 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psraw_imm_u128);
6806}
6807
6808
6809/** Opcode 0x0f 0x71 11/6. */
6810FNIEMOPRM_DEF(iemOp_Grp12_psllw_Nq_Ib)
6811{
6812// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6813 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllw_imm_u64);
6814}
6815
6816
6817/** Opcode 0x66 0x0f 0x71 11/6. */
6818FNIEMOPRM_DEF(iemOp_Grp12_psllw_Ux_Ib)
6819{
6820// IEMOP_MNEMONIC2(RI, PSLLW, psllw, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6821 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllw_imm_u128);
6822}
6823
6824
6825/**
6826 * Group 12 jump table for register variant.
6827 */
6828IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] =
6829{
6830 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6831 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6832 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_psrlw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6833 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6834 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_psraw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6835 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6836 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_psllw_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6837 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6838};
6839AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4);
6840
6841
6842/** Opcode 0x0f 0x71. */
6843FNIEMOP_DEF(iemOp_Grp12)
6844{
6845 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6846 if (IEM_IS_MODRM_REG_MODE(bRm))
6847 /* register, register */
6848 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
6849 + pVCpu->iem.s.idxPrefix], bRm);
6850 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
6851}
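/*
 * As with the three-byte escapes, each group table row holds four entries
 * indexed by idxPrefix, so (/reg * 4 + idxPrefix) selects between the MMX
 * (no prefix, Nq) and SSE2 (0x66, Ux) columns; the 0xf3/0xf2 columns are all
 * invalid. The same layout is used by groups 13 and 14 below.
 */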
6852
6853
6854/** Opcode 0x0f 0x72 11/2. */
6855FNIEMOPRM_DEF(iemOp_Grp13_psrld_Nq_Ib)
6856{
6857// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6858 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrld_imm_u64);
6859}
6860
6861
6862/** Opcode 0x66 0x0f 0x72 11/2. */
6863FNIEMOPRM_DEF(iemOp_Grp13_psrld_Ux_Ib)
6864{
6865// IEMOP_MNEMONIC2(RI, PSRLD, psrld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6866 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrld_imm_u128);
6867}
6868
6869
6870/** Opcode 0x0f 0x72 11/4. */
6871FNIEMOPRM_DEF(iemOp_Grp13_psrad_Nq_Ib)
6872{
6873// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6874 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrad_imm_u64);
6875}
6876
6877
6878/** Opcode 0x66 0x0f 0x72 11/4. */
6879FNIEMOPRM_DEF(iemOp_Grp13_psrad_Ux_Ib)
6880{
6881// IEMOP_MNEMONIC2(RI, PSRAD, psrad, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6882 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrad_imm_u128);
6883}
6884
6885
6886/** Opcode 0x0f 0x72 11/6. */
6887FNIEMOPRM_DEF(iemOp_Grp13_pslld_Nq_Ib)
6888{
6889// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
6890 return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_pslld_imm_u64);
6891}
6892
6893/** Opcode 0x66 0x0f 0x72 11/6. */
6894FNIEMOPRM_DEF(iemOp_Grp13_pslld_Ux_Ib)
6895{
6896// IEMOP_MNEMONIC2(RI, PSLLD, pslld, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
6897 return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslld_imm_u128);
6898}
6899
6900
6901/**
6902 * Group 13 jump table for register variant.
6903 */
6904IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] =
6905{
6906 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6907 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6908 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_psrld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6909 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6910 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_psrad_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6911 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
6912 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_pslld_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
6913 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
6914};
6915AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4);

/** Opcode 0x0f 0x72. */
FNIEMOP_DEF(iemOp_Grp13)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
                                                 + pVCpu->iem.s.idxPrefix], bRm);
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}


/** Opcode 0x0f 0x73 11/2. */
FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psrlq_imm_u64);
}


/** Opcode 0x66 0x0f 0x73 11/2. */
FNIEMOPRM_DEF(iemOp_Grp14_psrlq_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLQ, psrlq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrlq_imm_u128);
}


/** Opcode 0x66 0x0f 0x73 11/3. */
FNIEMOPRM_DEF(iemOp_Grp14_psrldq_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSRLDQ, psrldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psrldq_imm_u128);
}


/** Opcode 0x0f 0x73 11/6. */
FNIEMOPRM_DEF(iemOp_Grp14_psllq_Nq_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
    return FNIEMOP_CALL_2(iemOpCommonMmx_Shift_Imm, bRm, iemAImpl_psllq_imm_u64);
}


/** Opcode 0x66 0x0f 0x73 11/6. */
FNIEMOPRM_DEF(iemOp_Grp14_psllq_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLQ, psllq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_psllq_imm_u128);
}


/** Opcode 0x66 0x0f 0x73 11/7. */
FNIEMOPRM_DEF(iemOp_Grp14_pslldq_Ux_Ib)
{
//    IEMOP_MNEMONIC2(RI, PSLLDQ, pslldq, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_2(iemOpCommonSse2_Shift_Imm, bRm, iemAImpl_pslldq_imm_u128);
}

/**
 * Group 14 jump table for register variant.
 */
IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] =
{
    /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /2 */ iemOp_Grp14_psrlq_Nq_Ib,     iemOp_Grp14_psrlq_Ux_Ib,  iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
    /* /6 */ iemOp_Grp14_psllq_Nq_Ib,     iemOp_Grp14_psllq_Ux_Ib,  iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
    /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
};
AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
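
/* In Group 14 only the 0x66 column has /3 (psrldq) and /7 (pslldq) entries:
 * the double-quadword byte shifts exist for XMM registers only and have no
 * MMX counterpart. */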


/** Opcode 0x0f 0x73. */
FNIEMOP_DEF(iemOp_Grp14)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        /* register, register */
        return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
                                                 + pVCpu->iem.s.idxPrefix], bRm);
    return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
}
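
/* All Group 12/13/14 register forms funnel into the common
 * iemOpCommonMmx_Shift_Imm / iemOpCommonSse2_Shift_Imm workers with an
 * immediate shift count; the invalid encodings go via
 * iemOp_InvalidWithRMNeedImm8 so the trailing Ib byte still gets decoded. */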


/** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqb_u64);
}


/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPEQB, pcmpeqb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqb_u128);
}


/* Opcode 0xf3 0x0f 0x74 - invalid */
/* Opcode 0xf2 0x0f 0x74 - invalid */


/** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqw_u64);
}


/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPEQW, pcmpeqw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqw_u128);
}


/* Opcode 0xf3 0x0f 0x75 - invalid */
/* Opcode 0xf2 0x0f 0x75 - invalid */


/** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */
FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pcmpeqd_u64);
}


/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPEQD, pcmpeqd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pcmpeqd_u128);
}


/* Opcode 0xf3 0x0f 0x76 - invalid */
/* Opcode 0xf2 0x0f 0x76 - invalid */


/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
FNIEMOP_DEF(iemOp_emms)
{
    IEMOP_MNEMONIC(emms, "emms");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_FROM_MMX_MODE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
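
/* Note: emms leaves MMX mode by tagging all x87 registers empty again; that is
 * exactly what IEM_MC_FPU_FROM_MMX_MODE does above. */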

/* Opcode 0x66 0x0f 0x77 - invalid */
/* Opcode 0xf3 0x0f 0x77 - invalid */
/* Opcode 0xf2 0x0f 0x77 - invalid */

/** Opcode 0x0f 0x78 - VMREAD Ey, Gy */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_vmread_Ey_Gy)
{
    IEMOP_MNEMONIC(vmread, "vmread Ey,Gy");
    IEMOP_HLP_IN_VMX_OPERATION("vmread", kVmxVDiag_Vmread);
    IEMOP_HLP_VMX_INSTR("vmread", kVmxVDiag_Vmread);
    IEMMODE const enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : IEMMODE_32BIT;
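    /* The effective operand size is fixed by the CPU mode here (64-bit in long
       mode, 32-bit otherwise); operand-size prefixes do not select it.  The
       same applies to vmwrite below. */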

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint64_t, u64Enc, 1);
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmread_reg64, pu64Dst, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(2, 0, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint32_t, u32Enc, 1);
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmread_reg32, pu64Dst, u32Enc);
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Memory, register.
         */
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_ARG(uint64_t, u64Enc, 2);
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_vmread_mem_reg64, iEffSeg, GCPtrVal, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0, 0, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_ARG(uint32_t, u32Enc, 2);
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_vmread_mem_reg32, iEffSeg, GCPtrVal, u32Enc);
            IEM_MC_END();
        }
    }
}
#else
FNIEMOP_STUB(iemOp_vmread_Ey_Gy);
#endif

/* Opcode 0x66 0x0f 0x78 - AMD Group 17 */
FNIEMOP_STUB(iemOp_AmdGrp17);
/* Opcode 0xf3 0x0f 0x78 - invalid */
/* Opcode 0xf2 0x0f 0x78 - invalid */

/** Opcode 0x0f 0x79 - VMWRITE Gy, Ey */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_vmwrite_Gy_Ey)
{
    IEMOP_MNEMONIC(vmwrite, "vmwrite Gy,Ey");
    IEMOP_HLP_IN_VMX_OPERATION("vmwrite", kVmxVDiag_Vmwrite);
    IEMOP_HLP_VMX_INSTR("vmwrite", kVmxVDiag_Vmwrite);
    IEMMODE const enmEffOpSize = IEM_IS_64BIT_CODE(pVCpu) ? IEMMODE_64BIT : IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_ARG(uint64_t, u64Val, 0);
            IEM_MC_ARG(uint64_t, u64Enc, 1);
            IEM_MC_FETCH_GREG_U64(u64Val, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmwrite_reg, u64Val, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(2, 0, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_ARG(uint32_t, u32Val, 0);
            IEM_MC_ARG(uint32_t, u32Enc, 1);
            IEM_MC_FETCH_GREG_U32(u32Val, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmwrite_reg, u32Val, u32Enc);
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_ARG(uint64_t, u64Enc, 2);
            IEM_MC_FETCH_GREG_U64(u64Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u64Enc);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0, 0, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrVal, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrVal, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
            IEM_MC_ARG(uint32_t, u32Enc, 2);
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_FETCH_GREG_U32(u32Enc, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_vmwrite_mem, iEffSeg, GCPtrVal, u32Enc);
            IEM_MC_END();
        }
    }
}
#else
FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey);
#endif
/* Opcode 0x66 0x0f 0x79 - invalid */
/* Opcode 0xf3 0x0f 0x79 - invalid */
/* Opcode 0xf2 0x0f 0x79 - invalid */

/* Opcode 0x0f 0x7a - invalid */
/* Opcode 0x66 0x0f 0x7a - invalid */
/* Opcode 0xf3 0x0f 0x7a - invalid */
/* Opcode 0xf2 0x0f 0x7a - invalid */

/* Opcode 0x0f 0x7b - invalid */
/* Opcode 0x66 0x0f 0x7b - invalid */
/* Opcode 0xf3 0x0f 0x7b - invalid */
/* Opcode 0xf2 0x0f 0x7b - invalid */

/* Opcode 0x0f 0x7c - invalid */

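/* The SSE3 horizontal ops below pair up adjacent elements: haddpd yields
 * { dst[0]+dst[1], src[0]+src[1] }, and the hsub forms subtract the upper
 * element from the lower one instead. */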
/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_haddpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, HADDPD, haddpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddpd_u128);
}


/* Opcode 0xf3 0x0f 0x7c - invalid */


/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
FNIEMOP_DEF(iemOp_haddps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, HADDPS, haddps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_haddps_u128);
}


/* Opcode 0x0f 0x7d - invalid */


/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
FNIEMOP_DEF(iemOp_hsubpd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(RM, HSUBPD, hsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubpd_u128);
}


/* Opcode 0xf3 0x0f 0x7d - invalid */


/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
FNIEMOP_DEF(iemOp_hsubps_Vps_Wps)
{
    IEMOP_MNEMONIC2(RM, HSUBPS, hsubps, Vps, Wps, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_hsubps_u128);
}


/** Opcode 0x0f 0x7e - movd_q Ey, Pd */
FNIEMOP_DEF(iemOp_movd_q_Ey_Pd)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x7e
         * @opcodesub   rex.w=1
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, MMX */
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* [mem64], MMX */
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x7e
         * @opcodesub   rex.w=0
         * @oppfx       none
         * @opcpuid     mmx
         * @opgroup     og_mmx_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Ey_Pd
         * @optest      op1=1 op2=2   -> op1=2   ftw=0xff
         * @optest      op1=0 op2=-42 -> op1=-42 ftw=0xff
         */
        IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Pd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg32, MMX */
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* [mem32], MMX */
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
            IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
            IEM_MC_FPU_TO_MMX_MODE();

            IEM_MC_FETCH_MREG_U32(u32Tmp, IEM_GET_MODRM_REG_8(bRm));
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
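
/* Note: even a plain read of an MMX register puts the x87 unit into MMX mode
 * (TOS cleared, all tags marked valid), which is why the paths above call
 * IEM_MC_FPU_TO_MMX_MODE although the MMX register is only a source here. */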


FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x7e
         * @opcodesub   rex.w=1
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @optest      64-bit / op1=1 op2=2   -> op1=2
         * @optest      64-bit / op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(MR, MOVQ, movq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg64, XMM */
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* [mem64], XMM */
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, u64Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x7e
         * @opcodesub   rex.w=0
         * @oppfx       0x66
         * @opcpuid     sse2
         * @opgroup     og_sse2_simdint_datamove
         * @opxcpttype  5
         * @opfunction  iemOp_movd_q_Ey_Vy
         * @optest      op1=1 op2=2   -> op1=2
         * @optest      op1=0 op2=-42 -> op1=-42
         */
        IEMOP_MNEMONIC2(MR, MOVD, movd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /* greg32, XMM */
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /* [mem32], XMM */
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Tmp);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

            IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}

/**
 * @opcode      0x7e
 * @oppfx       0xf3
 * @opcpuid     sse2
 * @opgroup     og_sse2_pcksclr_datamove
 * @opxcpttype  none
 * @optest      op1=1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movq_Vq_Wq)
{
    IEMOP_MNEMONIC2(RM, MOVQ, movq, VqZx_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM128, XMM64.
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_LOCAL(uint64_t, uSrc);

        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
        IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM128, [mem64].
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
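
/* Note: the ZX_U128 stores above are what give this movq form its documented
 * zeroing of the destination register's upper 64 bits. */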

/* Opcode 0xf2 0x0f 0x7e - invalid */


/** Opcode 0x0f 0x7f - movq Qq, Pq */
FNIEMOP_DEF(iemOp_movq_Qq_Pq)
{
    IEMOP_MNEMONIC2(MR, MOVQ, movq, Qq_WO, Pq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_IGNORES_REXW);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_RM_8(bRm), u64Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem64], MMX.
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint64_t, u64Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_FETCH_MREG_U64(u64Tmp, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
{
    IEMOP_MNEMONIC2(MR, MOVDQA, movdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(0, 0, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem128], XMM.
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
{
    IEMOP_MNEMONIC2(MR, MOVDQU, movdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        IEM_MC_BEGIN(0, 0, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
        IEM_MC_COPY_XREG_U128(IEM_GET_MODRM_RM(pVCpu, bRm),
                              IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem128], XMM.
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();

        IEM_MC_FETCH_XREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
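
/* The only difference between the movdqa and movdqu stores above is the
 * alignment check: IEM_MC_STORE_MEM_U128_ALIGN_SSE raises #GP(0) on a
 * misaligned address, while the plain U128 store does not. */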

/* Opcode 0xf2 0x0f 0x7f - invalid */



/** Opcode 0x0f 0x80. */
FNIEMOP_DEF(iemOp_jo_Jv)
{
    IEMOP_MNEMONIC(jo_Jv, "jo Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}
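
/* The remaining 0x0f 0x81..0x8f conditional jumps below all follow the same
 * pattern; only the EFLAGS predicate differs (e.g. jc tests CF, jbe tests
 * CF || ZF, jl tests SF != OF, jle tests ZF || SF != OF). */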


/** Opcode 0x0f 0x81. */
FNIEMOP_DEF(iemOp_jno_Jv)
{
    IEMOP_MNEMONIC(jno_Jv, "jno Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x82. */
FNIEMOP_DEF(iemOp_jc_Jv)
{
    IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x83. */
FNIEMOP_DEF(iemOp_jnc_Jv)
{
    IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x84. */
FNIEMOP_DEF(iemOp_je_Jv)
{
    IEMOP_MNEMONIC(je_Jv, "je/jz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x85. */
FNIEMOP_DEF(iemOp_jne_Jv)
{
    IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x86. */
FNIEMOP_DEF(iemOp_jbe_Jv)
{
    IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x87. */
FNIEMOP_DEF(iemOp_jnbe_Jv)
{
    IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x88. */
FNIEMOP_DEF(iemOp_js_Jv)
{
    IEMOP_MNEMONIC(js_Jv, "js Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x89. */
FNIEMOP_DEF(iemOp_jns_Jv)
{
    IEMOP_MNEMONIC(jns_Jv, "jns Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x8a. */
FNIEMOP_DEF(iemOp_jp_Jv)
{
    IEMOP_MNEMONIC(jp_Jv, "jp Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x8b. */
FNIEMOP_DEF(iemOp_jnp_Jv)
{
    IEMOP_MNEMONIC(jnp_Jv, "jnp Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x8c. */
FNIEMOP_DEF(iemOp_jl_Jv)
{
    IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x8d. */
FNIEMOP_DEF(iemOp_jnl_Jv)
{
    IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x8e. */
FNIEMOP_DEF(iemOp_jle_Jv)
{
    IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x8f. */
FNIEMOP_DEF(iemOp_jnle_Jv)
{
    IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv");
    IEMOP_HLP_MIN_386();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x90. */
FNIEMOP_DEF(iemOp_seto_Eb)
{
    IEMOP_MNEMONIC(seto_Eb, "seto Eb");
    IEMOP_HLP_MIN_386();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
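
/* The 0x0f 0x91..0x9f setcc forms below mirror the jcc predicates above,
 * storing 1 or 0 into the byte-sized destination instead of branching. */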
8264
8265
8266/** Opcode 0x0f 0x91. */
8267FNIEMOP_DEF(iemOp_setno_Eb)
8268{
8269 IEMOP_MNEMONIC(setno_Eb, "setno Eb");
8270 IEMOP_HLP_MIN_386();
8271 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8272
8273 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8274 * any way. AMD says it's "unused", whatever that means. We're
8275 * ignoring for now. */
8276 if (IEM_IS_MODRM_REG_MODE(bRm))
8277 {
8278 /* register target */
8279 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8280 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8281 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8282 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8283 } IEM_MC_ELSE() {
8284 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8285 } IEM_MC_ENDIF();
8286 IEM_MC_ADVANCE_RIP_AND_FINISH();
8287 IEM_MC_END();
8288 }
8289 else
8290 {
8291 /* memory target */
8292 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8293 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8294 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8295 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8296 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8297 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8298 } IEM_MC_ELSE() {
8299 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8300 } IEM_MC_ENDIF();
8301 IEM_MC_ADVANCE_RIP_AND_FINISH();
8302 IEM_MC_END();
8303 }
8304}
8305
8306
8307/** Opcode 0x0f 0x92. */
8308FNIEMOP_DEF(iemOp_setc_Eb)
8309{
8310 IEMOP_MNEMONIC(setc_Eb, "setc Eb");
8311 IEMOP_HLP_MIN_386();
8312 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8313
8314 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8315 * any way. AMD says it's "unused", whatever that means. We're
8316 * ignoring for now. */
8317 if (IEM_IS_MODRM_REG_MODE(bRm))
8318 {
8319 /* register target */
8320 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8321 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8322 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8323 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8324 } IEM_MC_ELSE() {
8325 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8326 } IEM_MC_ENDIF();
8327 IEM_MC_ADVANCE_RIP_AND_FINISH();
8328 IEM_MC_END();
8329 }
8330 else
8331 {
8332 /* memory target */
8333 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8334 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8335 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8336 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8337 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8338 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8339 } IEM_MC_ELSE() {
8340 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8341 } IEM_MC_ENDIF();
8342 IEM_MC_ADVANCE_RIP_AND_FINISH();
8343 IEM_MC_END();
8344 }
8345}
8346
8347
8348/** Opcode 0x0f 0x93. */
8349FNIEMOP_DEF(iemOp_setnc_Eb)
8350{
8351 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb");
8352 IEMOP_HLP_MIN_386();
8353 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8354
8355 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8356 * any way. AMD says it's "unused", whatever that means. We're
8357 * ignoring for now. */
8358 if (IEM_IS_MODRM_REG_MODE(bRm))
8359 {
8360 /* register target */
8361 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8362 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8363 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8364 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8365 } IEM_MC_ELSE() {
8366 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8367 } IEM_MC_ENDIF();
8368 IEM_MC_ADVANCE_RIP_AND_FINISH();
8369 IEM_MC_END();
8370 }
8371 else
8372 {
8373 /* memory target */
8374 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8375 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8376 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8377 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8378 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8379 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8380 } IEM_MC_ELSE() {
8381 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8382 } IEM_MC_ENDIF();
8383 IEM_MC_ADVANCE_RIP_AND_FINISH();
8384 IEM_MC_END();
8385 }
8386}
8387
8388
8389/** Opcode 0x0f 0x94. */
8390FNIEMOP_DEF(iemOp_sete_Eb)
8391{
8392 IEMOP_MNEMONIC(sete_Eb, "sete Eb");
8393 IEMOP_HLP_MIN_386();
8394 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8395
8396 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8397 * any way. AMD says it's "unused", whatever that means. We're
8398 * ignoring for now. */
8399 if (IEM_IS_MODRM_REG_MODE(bRm))
8400 {
8401 /* register target */
8402 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8403 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8404 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8405 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8406 } IEM_MC_ELSE() {
8407 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8408 } IEM_MC_ENDIF();
8409 IEM_MC_ADVANCE_RIP_AND_FINISH();
8410 IEM_MC_END();
8411 }
8412 else
8413 {
8414 /* memory target */
8415 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8416 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8417 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8418 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8419 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8420 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8421 } IEM_MC_ELSE() {
8422 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8423 } IEM_MC_ENDIF();
8424 IEM_MC_ADVANCE_RIP_AND_FINISH();
8425 IEM_MC_END();
8426 }
8427}
8428
8429
8430/** Opcode 0x0f 0x95. */
8431FNIEMOP_DEF(iemOp_setne_Eb)
8432{
8433 IEMOP_MNEMONIC(setne_Eb, "setne Eb");
8434 IEMOP_HLP_MIN_386();
8435 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8436
8437 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8438 * any way. AMD says it's "unused", whatever that means. We're
8439 * ignoring for now. */
8440 if (IEM_IS_MODRM_REG_MODE(bRm))
8441 {
8442 /* register target */
8443 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8444 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8445 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8446 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8447 } IEM_MC_ELSE() {
8448 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8449 } IEM_MC_ENDIF();
8450 IEM_MC_ADVANCE_RIP_AND_FINISH();
8451 IEM_MC_END();
8452 }
8453 else
8454 {
8455 /* memory target */
8456 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8457 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8458 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8459 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8460 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8461 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8462 } IEM_MC_ELSE() {
8463 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8464 } IEM_MC_ENDIF();
8465 IEM_MC_ADVANCE_RIP_AND_FINISH();
8466 IEM_MC_END();
8467 }
8468}
8469
8470
8471/** Opcode 0x0f 0x96. */
8472FNIEMOP_DEF(iemOp_setbe_Eb)
8473{
8474 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb");
8475 IEMOP_HLP_MIN_386();
8476 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8477
8478 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8479 * any way. AMD says it's "unused", whatever that means. We're
8480 * ignoring for now. */
8481 if (IEM_IS_MODRM_REG_MODE(bRm))
8482 {
8483 /* register target */
8484 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8485 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8486 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8487 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8488 } IEM_MC_ELSE() {
8489 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8490 } IEM_MC_ENDIF();
8491 IEM_MC_ADVANCE_RIP_AND_FINISH();
8492 IEM_MC_END();
8493 }
8494 else
8495 {
8496 /* memory target */
8497 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8498 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8499 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8500 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8501 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8502 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8503 } IEM_MC_ELSE() {
8504 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8505 } IEM_MC_ENDIF();
8506 IEM_MC_ADVANCE_RIP_AND_FINISH();
8507 IEM_MC_END();
8508 }
8509}
8510
8511
8512/** Opcode 0x0f 0x97. */
8513FNIEMOP_DEF(iemOp_setnbe_Eb)
8514{
8515 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb");
8516 IEMOP_HLP_MIN_386();
8517 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8518
8519 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8520 * any way. AMD says it's "unused", whatever that means. We're
8521 * ignoring for now. */
8522 if (IEM_IS_MODRM_REG_MODE(bRm))
8523 {
8524 /* register target */
8525 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8526 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8527 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8528 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8529 } IEM_MC_ELSE() {
8530 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8531 } IEM_MC_ENDIF();
8532 IEM_MC_ADVANCE_RIP_AND_FINISH();
8533 IEM_MC_END();
8534 }
8535 else
8536 {
8537 /* memory target */
8538 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8539 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8540 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8541 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8542 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8543 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8544 } IEM_MC_ELSE() {
8545 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8546 } IEM_MC_ENDIF();
8547 IEM_MC_ADVANCE_RIP_AND_FINISH();
8548 IEM_MC_END();
8549 }
8550}
8551
8552
8553/** Opcode 0x0f 0x98. */
8554FNIEMOP_DEF(iemOp_sets_Eb)
8555{
8556 IEMOP_MNEMONIC(sets_Eb, "sets Eb");
8557 IEMOP_HLP_MIN_386();
8558 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8559
8560 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8561 * any way. AMD says it's "unused", whatever that means. We're
8562 * ignoring for now. */
8563 if (IEM_IS_MODRM_REG_MODE(bRm))
8564 {
8565 /* register target */
8566 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8567 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8568 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8569 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8570 } IEM_MC_ELSE() {
8571 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8572 } IEM_MC_ENDIF();
8573 IEM_MC_ADVANCE_RIP_AND_FINISH();
8574 IEM_MC_END();
8575 }
8576 else
8577 {
8578 /* memory target */
8579 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8580 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8581 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8582 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8583 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8584 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8585 } IEM_MC_ELSE() {
8586 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8587 } IEM_MC_ENDIF();
8588 IEM_MC_ADVANCE_RIP_AND_FINISH();
8589 IEM_MC_END();
8590 }
8591}
8592
8593
8594/** Opcode 0x0f 0x99. */
8595FNIEMOP_DEF(iemOp_setns_Eb)
8596{
8597 IEMOP_MNEMONIC(setns_Eb, "setns Eb");
8598 IEMOP_HLP_MIN_386();
8599 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8600
8601 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8602 * any way. AMD says it's "unused", whatever that means. We're
8603 * ignoring for now. */
8604 if (IEM_IS_MODRM_REG_MODE(bRm))
8605 {
8606 /* register target */
8607 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8608 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8609 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8610 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8611 } IEM_MC_ELSE() {
8612 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8613 } IEM_MC_ENDIF();
8614 IEM_MC_ADVANCE_RIP_AND_FINISH();
8615 IEM_MC_END();
8616 }
8617 else
8618 {
8619 /* memory target */
8620 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8621 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8622 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8623 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8624 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8625 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8626 } IEM_MC_ELSE() {
8627 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8628 } IEM_MC_ENDIF();
8629 IEM_MC_ADVANCE_RIP_AND_FINISH();
8630 IEM_MC_END();
8631 }
8632}
8633
8634
8635/** Opcode 0x0f 0x9a. */
8636FNIEMOP_DEF(iemOp_setp_Eb)
8637{
8638 IEMOP_MNEMONIC(setp_Eb, "setp Eb");
8639 IEMOP_HLP_MIN_386();
8640 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8641
8642 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8643 * any way. AMD says it's "unused", whatever that means. We're
8644 * ignoring for now. */
8645 if (IEM_IS_MODRM_REG_MODE(bRm))
8646 {
8647 /* register target */
8648 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8649 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8650 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8651 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8652 } IEM_MC_ELSE() {
8653 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8654 } IEM_MC_ENDIF();
8655 IEM_MC_ADVANCE_RIP_AND_FINISH();
8656 IEM_MC_END();
8657 }
8658 else
8659 {
8660 /* memory target */
8661 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8662 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8663 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8664 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8665 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8666 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8667 } IEM_MC_ELSE() {
8668 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8669 } IEM_MC_ENDIF();
8670 IEM_MC_ADVANCE_RIP_AND_FINISH();
8671 IEM_MC_END();
8672 }
8673}
8674
8675
8676/** Opcode 0x0f 0x9b. */
8677FNIEMOP_DEF(iemOp_setnp_Eb)
8678{
8679 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb");
8680 IEMOP_HLP_MIN_386();
8681 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8682
8683 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8684 * any way. AMD says it's "unused", whatever that means. We're
8685 * ignoring for now. */
8686 if (IEM_IS_MODRM_REG_MODE(bRm))
8687 {
8688 /* register target */
8689 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8690 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8691 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8692 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8693 } IEM_MC_ELSE() {
8694 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8695 } IEM_MC_ENDIF();
8696 IEM_MC_ADVANCE_RIP_AND_FINISH();
8697 IEM_MC_END();
8698 }
8699 else
8700 {
8701 /* memory target */
8702 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8703 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8704 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8705 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8706 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
8707 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8708 } IEM_MC_ELSE() {
8709 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8710 } IEM_MC_ENDIF();
8711 IEM_MC_ADVANCE_RIP_AND_FINISH();
8712 IEM_MC_END();
8713 }
8714}
8715
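/* Note: the signed SETcc forms below (0x9c..0x9f) use the standard
 * signed-comparison identities: 'less' is SF != OF (the result was
 * negative XOR it overflowed), and 'less or equal' additionally accepts
 * ZF=1. Example: a cmp of -1 against 1 gives SF=1, OF=0, so SF != OF
 * and setl stores 1. */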
8716
8717/** Opcode 0x0f 0x9c. */
8718FNIEMOP_DEF(iemOp_setl_Eb)
8719{
8720 IEMOP_MNEMONIC(setl_Eb, "setl Eb");
8721 IEMOP_HLP_MIN_386();
8722 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8723
8724 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8725 * any way. AMD says it's "unused", whatever that means. We're
8726 * ignoring for now. */
8727 if (IEM_IS_MODRM_REG_MODE(bRm))
8728 {
8729 /* register target */
8730 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8731 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8732 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8733 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8734 } IEM_MC_ELSE() {
8735 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8736 } IEM_MC_ENDIF();
8737 IEM_MC_ADVANCE_RIP_AND_FINISH();
8738 IEM_MC_END();
8739 }
8740 else
8741 {
8742 /* memory target */
8743 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8744 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8745 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8746 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8747 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8748 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8749 } IEM_MC_ELSE() {
8750 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8751 } IEM_MC_ENDIF();
8752 IEM_MC_ADVANCE_RIP_AND_FINISH();
8753 IEM_MC_END();
8754 }
8755}
8756
8757
8758/** Opcode 0x0f 0x9d. */
8759FNIEMOP_DEF(iemOp_setnl_Eb)
8760{
8761 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb");
8762 IEMOP_HLP_MIN_386();
8763 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8764
8765 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8766 * any way. AMD says it's "unused", whatever that means. We're
8767 * ignoring for now. */
8768 if (IEM_IS_MODRM_REG_MODE(bRm))
8769 {
8770 /* register target */
8771 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8772 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8773 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8774 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8775 } IEM_MC_ELSE() {
8776 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8777 } IEM_MC_ENDIF();
8778 IEM_MC_ADVANCE_RIP_AND_FINISH();
8779 IEM_MC_END();
8780 }
8781 else
8782 {
8783 /* memory target */
8784 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8785 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8786 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8787 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8788 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
8789 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8790 } IEM_MC_ELSE() {
8791 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8792 } IEM_MC_ENDIF();
8793 IEM_MC_ADVANCE_RIP_AND_FINISH();
8794 IEM_MC_END();
8795 }
8796}
8797
8798
8799/** Opcode 0x0f 0x9e. */
8800FNIEMOP_DEF(iemOp_setle_Eb)
8801{
8802 IEMOP_MNEMONIC(setle_Eb, "setle Eb");
8803 IEMOP_HLP_MIN_386();
8804 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8805
8806 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8807 * any way. AMD says it's "unused", whatever that means. We're
8808 * ignoring for now. */
8809 if (IEM_IS_MODRM_REG_MODE(bRm))
8810 {
8811 /* register target */
8812 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8813 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8814 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8815 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8816 } IEM_MC_ELSE() {
8817 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8818 } IEM_MC_ENDIF();
8819 IEM_MC_ADVANCE_RIP_AND_FINISH();
8820 IEM_MC_END();
8821 }
8822 else
8823 {
8824 /* memory target */
8825 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8826 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8827 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8828 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8829 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8830 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8831 } IEM_MC_ELSE() {
8832 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8833 } IEM_MC_ENDIF();
8834 IEM_MC_ADVANCE_RIP_AND_FINISH();
8835 IEM_MC_END();
8836 }
8837}
8838
8839
8840/** Opcode 0x0f 0x9f. */
8841FNIEMOP_DEF(iemOp_setnle_Eb)
8842{
8843 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb");
8844 IEMOP_HLP_MIN_386();
8845 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8846
8847 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
8848 * any way. AMD says it's "unused", whatever that means. We're
8849 * ignoring for now. */
8850 if (IEM_IS_MODRM_REG_MODE(bRm))
8851 {
8852 /* register target */
8853 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8854 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8855 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8856 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
8857 } IEM_MC_ELSE() {
8858 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 1);
8859 } IEM_MC_ENDIF();
8860 IEM_MC_ADVANCE_RIP_AND_FINISH();
8861 IEM_MC_END();
8862 }
8863 else
8864 {
8865 /* memory target */
8866 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8867 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8868 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8869 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8870 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
8871 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0);
8872 } IEM_MC_ELSE() {
8873 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1);
8874 } IEM_MC_ENDIF();
8875 IEM_MC_ADVANCE_RIP_AND_FINISH();
8876 IEM_MC_END();
8877 }
8878}
8879
8880
8881/** Opcode 0x0f 0xa0. */
8882FNIEMOP_DEF(iemOp_push_fs)
8883{
8884 IEMOP_MNEMONIC(push_fs, "push fs");
8885 IEMOP_HLP_MIN_386();
8886 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8887 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
8888}
8889
8890
8891/** Opcode 0x0f 0xa1. */
8892FNIEMOP_DEF(iemOp_pop_fs)
8893{
8894 IEMOP_MNEMONIC(pop_fs, "pop fs");
8895 IEMOP_HLP_MIN_386();
8896 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8897 IEM_MC_DEFER_TO_CIMPL_2_RET(0, iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize);
8898}
8899
8900
8901/** Opcode 0x0f 0xa2. */
8902FNIEMOP_DEF(iemOp_cpuid)
8903{
8904 IEMOP_MNEMONIC(cpuid, "cpuid");
8905 IEMOP_HLP_MIN_486(); /* not all 486es. */
8906 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8907 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_cpuid);
8908}
8909
8910
8911/**
8912 * Body for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
8913 * iemOp_bts_Ev_Gv.
8914 */
8915
8916#define IEMOP_BODY_BIT_Ev_Gv_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
8917 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
8918 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
8919 \
8920 if (IEM_IS_MODRM_REG_MODE(bRm)) \
8921 { \
8922 /* register destination. */ \
8923 switch (pVCpu->iem.s.enmEffOpSize) \
8924 { \
8925 case IEMMODE_16BIT: \
8926 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
8927 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8928 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
8929 IEM_MC_ARG(uint16_t, u16Src, 1); \
8930 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
8931 \
8932 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8933 IEM_MC_AND_LOCAL_U16(u16Src, 0xf); \
8934 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8935 IEM_MC_REF_EFLAGS(pEFlags); \
8936 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
8937 \
8938 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8939 IEM_MC_END(); \
8940 break; \
8941 \
8942 case IEMMODE_32BIT: \
8943 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
8944 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8945 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
8946 IEM_MC_ARG(uint32_t, u32Src, 1); \
8947 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
8948 \
8949 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8950 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f); \
8951 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8952 IEM_MC_REF_EFLAGS(pEFlags); \
8953 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
8954 \
8955 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
8956 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8957 IEM_MC_END(); \
8958 break; \
8959 \
8960 case IEMMODE_64BIT: \
8961 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
8962 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8963 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
8964 IEM_MC_ARG(uint64_t, u64Src, 1); \
8965 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
8966 \
8967 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8968 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f); \
8969 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8970 IEM_MC_REF_EFLAGS(pEFlags); \
8971 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
8972 \
8973 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8974 IEM_MC_END(); \
8975 break; \
8976 \
8977 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
8978 } \
8979 } \
8980 else \
8981 { \
8982 /* memory destination. */ \
8983 /** @todo test negative bit offsets! */ \
8984 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
8985 { \
8986 switch (pVCpu->iem.s.enmEffOpSize) \
8987 { \
8988 case IEMMODE_16BIT: \
8989 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_386, 0); \
8990 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
8991 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
8992 IEMOP_HLP_DONE_DECODING(); \
8993 \
8994 IEM_MC_ARG(uint16_t, u16Src, 1); \
8995 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
8996 IEM_MC_LOCAL_ASSIGN(int16_t, i16AddrAdj, /*=*/ u16Src); \
8997 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
8998 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
8999 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
9000 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
9001 \
9002 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9003 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9004 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9005 \
9006 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9007 IEM_MC_FETCH_EFLAGS(EFlags); \
9008 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
9009 \
9010 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
9011 IEM_MC_COMMIT_EFLAGS(EFlags); \
9012 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9013 IEM_MC_END(); \
9014 break; \
9015 \
9016 case IEMMODE_32BIT: \
9017 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_386, 0); \
9018 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9019 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9020 IEMOP_HLP_DONE_DECODING(); \
9021 \
9022 IEM_MC_ARG(uint32_t, u32Src, 1); \
9023 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9024 IEM_MC_LOCAL_ASSIGN(int32_t, i32AddrAdj, /*=*/ u32Src); \
9025 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
9026 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
9027 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
9028 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
9029 \
9030 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9031 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9032 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9033 \
9034 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9035 IEM_MC_FETCH_EFLAGS(EFlags); \
9036 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
9037 \
9038 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
9039 IEM_MC_COMMIT_EFLAGS(EFlags); \
9040 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9041 IEM_MC_END(); \
9042 break; \
9043 \
9044 case IEMMODE_64BIT: \
9045 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0); \
9046 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9047 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9048 IEMOP_HLP_DONE_DECODING(); \
9049 \
9050 IEM_MC_ARG(uint64_t, u64Src, 1); \
9051 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9052 IEM_MC_LOCAL_ASSIGN(int64_t, i64AddrAdj, /*=*/ u64Src); \
9053 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9054 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9055 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9056 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9057 \
9058 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9059 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9060 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9061 \
9062 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9063 IEM_MC_FETCH_EFLAGS(EFlags); \
9064 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
9065 \
9066 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
9067 IEM_MC_COMMIT_EFLAGS(EFlags); \
9068 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9069 IEM_MC_END(); \
9070 break; \
9071 \
9072 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9073 } \
9074 } \
9075 else \
9076 { \
9077 (void)0
9078/* Separate macro to work around parsing issue in IEMAllInstPython.py */
9079#define IEMOP_BODY_BIT_Ev_Gv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
9080 switch (pVCpu->iem.s.enmEffOpSize) \
9081 { \
9082 case IEMMODE_16BIT: \
9083 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_386, 0); \
9084 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9085 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9086 IEMOP_HLP_DONE_DECODING(); \
9087 \
9088 IEM_MC_ARG(uint16_t, u16Src, 1); \
9089 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9090 IEM_MC_LOCAL_ASSIGN(int16_t, i16AddrAdj, /*=*/ u16Src); \
9091 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
9092 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
9093 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
9094 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
9095 \
9096 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9097 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9098 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9099 \
9100 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9101 IEM_MC_FETCH_EFLAGS(EFlags); \
9102 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
9103 \
9104 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
9105 IEM_MC_COMMIT_EFLAGS(EFlags); \
9106 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9107 IEM_MC_END(); \
9108 break; \
9109 \
9110 case IEMMODE_32BIT: \
9111 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_386, 0); \
9112 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9113 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9114 IEMOP_HLP_DONE_DECODING(); \
9115 \
9116 IEM_MC_ARG(uint32_t, u32Src, 1); \
9117 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9118 IEM_MC_LOCAL_ASSIGN(int32_t, i32AddrAdj, /*=*/ u32Src); \
9119 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
9120 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
9121 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
9122 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
9123 \
9124 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9125 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9126 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9127 \
9128 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9129 IEM_MC_FETCH_EFLAGS(EFlags); \
9130 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
9131 \
9132 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
9133 IEM_MC_COMMIT_EFLAGS(EFlags); \
9134 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9135 IEM_MC_END(); \
9136 break; \
9137 \
9138 case IEMMODE_64BIT: \
9139 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0); \
9140 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9141 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9142 IEMOP_HLP_DONE_DECODING(); \
9143 \
9144 IEM_MC_ARG(uint64_t, u64Src, 1); \
9145 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9146 IEM_MC_LOCAL_ASSIGN(int64_t, i64AddrAdj, /*=*/ u64Src); \
9147 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9148 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9149 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9150 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9151 \
9152 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9153 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9154 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9155 \
9156 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9157 IEM_MC_FETCH_EFLAGS(EFlags); \
9158 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
9159 \
9160 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
9161 IEM_MC_COMMIT_EFLAGS(EFlags); \
9162 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9163 IEM_MC_END(); \
9164 break; \
9165 \
9166 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9167 } \
9168 } \
9169 } \
9170 (void)0
9171
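/* Note on the memory forms in these bodies: the register operand is a
 * signed bit index relative to the memory operand, so before mapping the
 * destination the effective address is adjusted by the index divided by
 * the operand width (arithmetic shift right by 4/5/6) scaled back to
 * bytes (shift left by 1/2/3), leaving only the low 4/5/6 bits to select
 * the bit. Worked 16-bit example: index 35 -> address += (35 >> 4) * 2 =
 * +4 and bit 35 & 15 = 3; index -1 -> address += (-1 >> 4) * 2 = -2 and
 * bit 15 of that word. */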
9172/* Read-only version (bt). */
9173#define IEMOP_BODY_BIT_Ev_Gv_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
9174 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
9175 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
9176 \
9177 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9178 { \
9179 /* register destination. */ \
9180 switch (pVCpu->iem.s.enmEffOpSize) \
9181 { \
9182 case IEMMODE_16BIT: \
9183 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
9184 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9185 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
9186 IEM_MC_ARG(uint16_t, u16Src, 1); \
9187 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9188 \
9189 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9190 IEM_MC_AND_LOCAL_U16(u16Src, 0xf); \
9191 IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9192 IEM_MC_REF_EFLAGS(pEFlags); \
9193 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
9194 \
9195 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9196 IEM_MC_END(); \
9197 break; \
9198 \
9199 case IEMMODE_32BIT: \
9200 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
9201 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9202 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
9203 IEM_MC_ARG(uint32_t, u32Src, 1); \
9204 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9205 \
9206 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9207 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f); \
9208 IEM_MC_REF_GREG_U32_CONST(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9209 IEM_MC_REF_EFLAGS(pEFlags); \
9210 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
9211 \
9212 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9213 IEM_MC_END(); \
9214 break; \
9215 \
9216 case IEMMODE_64BIT: \
9217 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
9218 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9219 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
9220 IEM_MC_ARG(uint64_t, u64Src, 1); \
9221 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9222 \
9223 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9224 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f); \
9225 IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9226 IEM_MC_REF_EFLAGS(pEFlags); \
9227 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
9228 \
9229 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9230 IEM_MC_END(); \
9231 break; \
9232 \
9233 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9234 } \
9235 } \
9236 else \
9237 { \
9238 /* memory destination. */ \
9239 /** @todo test negative bit offsets! */ \
9240 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
9241 { \
9242 switch (pVCpu->iem.s.enmEffOpSize) \
9243 { \
9244 case IEMMODE_16BIT: \
9245 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_386, 0); \
9246 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9247 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9248 IEMOP_HLP_DONE_DECODING(); \
9249 \
9250 IEM_MC_ARG(uint16_t, u16Src, 1); \
9251 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9252 IEM_MC_LOCAL_ASSIGN(int16_t, i16AddrAdj, /*=*/ u16Src); \
9253 IEM_MC_AND_ARG_U16(u16Src, 0x0f); \
9254 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); \
9255 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); \
9256 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); \
9257 \
9258 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9259 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
9260 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9261 \
9262 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9263 IEM_MC_FETCH_EFLAGS(EFlags); \
9264 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
9265 \
9266 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
9267 IEM_MC_COMMIT_EFLAGS(EFlags); \
9268 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9269 IEM_MC_END(); \
9270 break; \
9271 \
9272 case IEMMODE_32BIT: \
9273 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_386, 0); \
9274 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9275 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9276 IEMOP_HLP_DONE_DECODING(); \
9277 \
9278 IEM_MC_ARG(uint32_t, u32Src, 1); \
9279 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9280 IEM_MC_LOCAL_ASSIGN(int32_t, i32AddrAdj, /*=*/ u32Src); \
9281 IEM_MC_AND_ARG_U32(u32Src, 0x1f); \
9282 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); \
9283 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); \
9284 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); \
9285 \
9286 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9287 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
9288 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9289 \
9290 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9291 IEM_MC_FETCH_EFLAGS(EFlags); \
9292 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
9293 \
9294 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
9295 IEM_MC_COMMIT_EFLAGS(EFlags); \
9296 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9297 IEM_MC_END(); \
9298 break; \
9299 \
9300 case IEMMODE_64BIT: \
9301 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0); \
9302 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9303 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9304 IEMOP_HLP_DONE_DECODING(); \
9305 \
9306 IEM_MC_ARG(uint64_t, u64Src, 1); \
9307 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
9308 IEM_MC_LOCAL_ASSIGN(int64_t, i64AddrAdj, /*=*/ u64Src); \
9309 IEM_MC_AND_ARG_U64(u64Src, 0x3f); \
9310 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); \
9311 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); \
9312 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); \
9313 \
9314 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9315 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
9316 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9317 \
9318 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
9319 IEM_MC_FETCH_EFLAGS(EFlags); \
9320 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
9321 \
9322 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
9323 IEM_MC_COMMIT_EFLAGS(EFlags); \
9324 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9325 IEM_MC_END(); \
9326 break; \
9327 \
9328 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9329 } \
9330 } \
9331 else \
9332 { \
9333 IEMOP_HLP_DONE_DECODING(); \
9334 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
9335 } \
9336 } \
9337 (void)0
9338
9339
9340/** Opcode 0x0f 0xa3. */
9341FNIEMOP_DEF(iemOp_bt_Ev_Gv)
9342{
9343 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv");
9344 IEMOP_HLP_MIN_386();
9345 IEMOP_BODY_BIT_Ev_Gv_RO(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64);
9346}
9347
9348
9349/**
9350 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
9351 */
9352FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
9353{
9354 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9355 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
9356
9357 if (IEM_IS_MODRM_REG_MODE(bRm))
9358 {
9359 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9360
9361 switch (pVCpu->iem.s.enmEffOpSize)
9362 {
9363 case IEMMODE_16BIT:
9364 IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_386, 0);
9365 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9366 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9367 IEM_MC_ARG(uint16_t, u16Src, 1);
9368 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
9369 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9370
9371 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9372 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9373 IEM_MC_REF_EFLAGS(pEFlags);
9374 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9375
9376 IEM_MC_ADVANCE_RIP_AND_FINISH();
9377 IEM_MC_END();
9378 break;
9379
9380 case IEMMODE_32BIT:
9381 IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_386, 0);
9382 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9383 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9384 IEM_MC_ARG(uint32_t, u32Src, 1);
9385 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
9386 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9387
9388 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9389 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9390 IEM_MC_REF_EFLAGS(pEFlags);
9391 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9392
9393 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
9394 IEM_MC_ADVANCE_RIP_AND_FINISH();
9395 IEM_MC_END();
9396 break;
9397
9398 case IEMMODE_64BIT:
9399 IEM_MC_BEGIN(4, 0, IEM_MC_F_64BIT, 0);
9400 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9401 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9402 IEM_MC_ARG(uint64_t, u64Src, 1);
9403 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
9404 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9405
9406 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9407 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9408 IEM_MC_REF_EFLAGS(pEFlags);
9409 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9410
9411 IEM_MC_ADVANCE_RIP_AND_FINISH();
9412 IEM_MC_END();
9413 break;
9414
9415 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9416 }
9417 }
9418 else
9419 {
9420 switch (pVCpu->iem.s.enmEffOpSize)
9421 {
9422 case IEMMODE_16BIT:
9423 IEM_MC_BEGIN(4, 3, IEM_MC_F_MIN_386, 0);
9424 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9425 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9426
9427 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9428 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9429
9430 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9431 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9432 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9433
9434 IEM_MC_ARG(uint16_t, u16Src, 1);
9435 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9436 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ cShift, 2);
9437 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9438 IEM_MC_FETCH_EFLAGS(EFlags);
9439 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9440
9441 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
9442 IEM_MC_COMMIT_EFLAGS(EFlags);
9443 IEM_MC_ADVANCE_RIP_AND_FINISH();
9444 IEM_MC_END();
9445 break;
9446
9447 case IEMMODE_32BIT:
9448 IEM_MC_BEGIN(4, 3, IEM_MC_F_MIN_386, 0);
9449 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9450 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9451
9452 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9453 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9454
9455 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9456 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9457 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9458
9459 IEM_MC_ARG(uint32_t, u32Src, 1);
9460 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9461 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ cShift, 2);
9462 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9463 IEM_MC_FETCH_EFLAGS(EFlags);
9464 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9465
9466 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
9467 IEM_MC_COMMIT_EFLAGS(EFlags);
9468 IEM_MC_ADVANCE_RIP_AND_FINISH();
9469 IEM_MC_END();
9470 break;
9471
9472 case IEMMODE_64BIT:
9473 IEM_MC_BEGIN(4, 3, IEM_MC_F_64BIT, 0);
9474 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9475 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9476
9477 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
9478 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9479
9480 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9481 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9482 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9483
9484 IEM_MC_ARG(uint64_t, u64Src, 1);
9485 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9486 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/ cShift, 2);
9487 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9488 IEM_MC_FETCH_EFLAGS(EFlags);
9489
9490 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9491
9492 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
9493 IEM_MC_COMMIT_EFLAGS(EFlags);
9494 IEM_MC_ADVANCE_RIP_AND_FINISH();
9495 IEM_MC_END();
9496 break;
9497
9498 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9499 }
9500 }
9501}
9502
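/* Note: the memory paths above pass 1 rather than 0 to
 * IEM_MC_CALC_RM_EFF_ADDR because one immediate byte still follows the
 * ModR/M bytes, and RIP-relative operands must be computed against the
 * real end of the instruction. The CL variant below passes 0 as nothing
 * follows. */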
9503
9504/**
9505 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
9506 */
9507FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
9508{
9509 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9510 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);
9511
9512 if (IEM_IS_MODRM_REG_MODE(bRm))
9513 {
9514 switch (pVCpu->iem.s.enmEffOpSize)
9515 {
9516 case IEMMODE_16BIT:
9517 IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_386, 0);
9518 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9519 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9520 IEM_MC_ARG(uint16_t, u16Src, 1);
9521 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9522 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9523
9524 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9525 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9526 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9527 IEM_MC_REF_EFLAGS(pEFlags);
9528 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9529
9530 IEM_MC_ADVANCE_RIP_AND_FINISH();
9531 IEM_MC_END();
9532 break;
9533
9534 case IEMMODE_32BIT:
9535 IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_386, 0);
9536 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9537 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9538 IEM_MC_ARG(uint32_t, u32Src, 1);
9539 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9540 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9541
9542 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9543 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9544 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9545 IEM_MC_REF_EFLAGS(pEFlags);
9546 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9547
9548 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
9549 IEM_MC_ADVANCE_RIP_AND_FINISH();
9550 IEM_MC_END();
9551 break;
9552
9553 case IEMMODE_64BIT:
9554 IEM_MC_BEGIN(4, 0, IEM_MC_F_64BIT, 0);
9555 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9556 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9557 IEM_MC_ARG(uint64_t, u64Src, 1);
9558 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9559 IEM_MC_ARG(uint32_t *, pEFlags, 3);
9560
9561 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9562 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9563 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9564 IEM_MC_REF_EFLAGS(pEFlags);
9565 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9566
9567 IEM_MC_ADVANCE_RIP_AND_FINISH();
9568 IEM_MC_END();
9569 break;
9570
9571 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9572 }
9573 }
9574 else
9575 {
9576 switch (pVCpu->iem.s.enmEffOpSize)
9577 {
9578 case IEMMODE_16BIT:
9579 IEM_MC_BEGIN(4, 3, IEM_MC_F_MIN_386, 0);
9580 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9581 IEM_MC_ARG(uint16_t, u16Src, 1);
9582 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9583 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9584 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9585 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9586
9587 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9588 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9589 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9590 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9591 IEM_MC_FETCH_EFLAGS(EFlags);
9592 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9593 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);
9594
9595 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
9596 IEM_MC_COMMIT_EFLAGS(EFlags);
9597 IEM_MC_ADVANCE_RIP_AND_FINISH();
9598 IEM_MC_END();
9599 break;
9600
9601 case IEMMODE_32BIT:
9602 IEM_MC_BEGIN(4, 3, IEM_MC_F_MIN_386, 0);
9603 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9604 IEM_MC_ARG(uint32_t, u32Src, 1);
9605 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9606 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9607 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9608 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9609
9610 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9611 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9612 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9613 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9614 IEM_MC_FETCH_EFLAGS(EFlags);
9615 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9616 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);
9617
9618 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
9619 IEM_MC_COMMIT_EFLAGS(EFlags);
9620 IEM_MC_ADVANCE_RIP_AND_FINISH();
9621 IEM_MC_END();
9622 break;
9623
9624 case IEMMODE_64BIT:
9625 IEM_MC_BEGIN(4, 3, IEM_MC_F_64BIT, 0);
9626 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9627 IEM_MC_ARG(uint64_t, u64Src, 1);
9628 IEM_MC_ARG(uint8_t, cShiftArg, 2);
9629 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
9630 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9631 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9632
9633 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9634 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9635 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
9636 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9637 IEM_MC_FETCH_EFLAGS(EFlags);
9638 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9639 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);
9640
9641 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
9642 IEM_MC_COMMIT_EFLAGS(EFlags);
9643 IEM_MC_ADVANCE_RIP_AND_FINISH();
9644 IEM_MC_END();
9645 break;
9646
9647 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9648 }
9649 }
9650}
9651
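/* Note: neither worker masks the shift count here; count masking and the
 * cases the manuals leave undefined are left to the selected
 * pfnNormalU16/U32/U64 implementation. */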
9652
9653
9654/** Opcode 0x0f 0xa4. */
9655FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
9656{
9657 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib");
9658 IEMOP_HLP_MIN_386();
9659 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9660}
9661
9662
9663/** Opcode 0x0f 0xa5. */
9664FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
9665{
9666 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL");
9667 IEMOP_HLP_MIN_386();
9668 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shld_eflags));
9669}
9670
9671
9672/** Opcode 0x0f 0xa8. */
9673FNIEMOP_DEF(iemOp_push_gs)
9674{
9675 IEMOP_MNEMONIC(push_gs, "push gs");
9676 IEMOP_HLP_MIN_386();
9677 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9678 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
9679}
9680
9681
9682/** Opcode 0x0f 0xa9. */
9683FNIEMOP_DEF(iemOp_pop_gs)
9684{
9685 IEMOP_MNEMONIC(pop_gs, "pop gs");
9686 IEMOP_HLP_MIN_386();
9687 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9688 IEM_MC_DEFER_TO_CIMPL_2_RET(0, iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize);
9689}
9690
9691
9692/** Opcode 0x0f 0xaa. */
9693FNIEMOP_DEF(iemOp_rsm)
9694{
9695 IEMOP_MNEMONIC0(FIXED, RSM, rsm, DISOPTYPE_HARMLESS, 0);
9696 IEMOP_HLP_MIN_386(); /* 386SL and later. */
9697 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9698 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
9699 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
9700 iemCImpl_rsm);
9701}
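/* The heavy CIMPL flag set above (far branch, mode, rflags, vmexit, end
 * of TB) reflects that returning from SMM can change practically every
 * execution mode bit, so the recompiler must not continue the current
 * translation block after RSM. */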
9702
9703
9704
9705/** Opcode 0x0f 0xab. */
9706FNIEMOP_DEF(iemOp_bts_Ev_Gv)
9707{
9708 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv");
9709 IEMOP_HLP_MIN_386();
9710 IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_bts_u16, iemAImpl_bts_u32, iemAImpl_bts_u64);
9711 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
9712}
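/* The two IEMOP_BODY_BIT_Ev_Gv_* macros above expand into a single
 * if/else: the _RW body ends inside the lock-prefixed 'else' branch and
 * the _LOCKED body completes it. The split only exists for the
 * IEMAllInstPython.py parser, as noted at the macro definitions. */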
9713
9714
9715/** Opcode 0x0f 0xac. */
9716FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
9717{
9718 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib");
9719 IEMOP_HLP_MIN_386();
9720 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9721}
9722
9723
9724/** Opcode 0x0f 0xad. */
9725FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
9726{
9727 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL");
9728 IEMOP_HLP_MIN_386();
9729 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shrd_eflags));
9730}
9731
9732
9733/** Opcode 0x0f 0xae mem/0. */
9734FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
9735{
9736 IEMOP_MNEMONIC(fxsave, "fxsave m512");
9737 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9738 IEMOP_RAISE_INVALID_OPCODE_RET();
9739
9740 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_PENTIUM_II, 0);
9741 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9742 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9743 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9744 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9745 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9746 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/pVCpu->iem.s.enmEffOpSize, 2);
9747 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
9748 IEM_MC_END();
9749}
9750
9751
9752/** Opcode 0x0f 0xae mem/1. */
9753FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
9754{
9755 IEMOP_MNEMONIC(fxrstor, "fxrstor m512");
9756 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor)
9757 IEMOP_RAISE_INVALID_OPCODE_RET();
9758
9759 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_PENTIUM_II, 0);
9760 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9761 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9762 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9763 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9764 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9765 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/pVCpu->iem.s.enmEffOpSize, 2);
9766 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9767 IEM_MC_END();
9768}
9769
9770
9771/**
9772 * @opmaps grp15
9773 * @opcode !11/2
9774 * @oppfx none
9775 * @opcpuid sse
9776 * @opgroup og_sse_mxcsrsm
9777 * @opxcpttype 5
9778 * @optest op1=0 -> mxcsr=0
9779 * @optest op1=0x2083 -> mxcsr=0x2083
9780 * @optest op1=0xfffffffe -> value.xcpt=0xd
9781 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
9782 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6
9783 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
9784 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
9785 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
9786 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9787 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9788 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9789 */
9790FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm)
9791{
9792 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9793 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9794 IEMOP_RAISE_INVALID_OPCODE_RET();
9795
9796 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_PENTIUM_II, 0);
9797 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9798 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9799 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9800 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
9801 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9802 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
9803 IEM_MC_END();
9804}
9805
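/* As the @optest lines above encode: setting a reserved MXCSR bit (e.g.
 * 0xfffffffe) yields #GP (xcpt 0xd), CR0.TS yields #NM (0x7), and CR0.EM
 * or a clear CR4.OSFXSR yields #UD (0x6). The checks themselves live in
 * the iemCImpl_ldmxcsr implementation and the common SSE raise logic. */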
9806
9807/**
9808 * @opmaps grp15
9809 * @opcode !11/3
9810 * @oppfx none
9811 * @opcpuid sse
9812 * @opgroup og_sse_mxcsrsm
9813 * @opxcpttype 5
9814 * @optest mxcsr=0 -> op1=0
9815 * @optest mxcsr=0x2083 -> op1=0x2083
9816 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
9817 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
9818 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
9819 * @optest mxcsr=0x2087 cr4&~=osfxsr -> value.xcpt=0x6
9820 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
9821 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
9822 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
9823 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
9824 */
9825FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm)
9826{
9827 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9828 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
9829 IEMOP_RAISE_INVALID_OPCODE_RET();
9830
9831 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_PENTIUM_II, 0);
9832 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9833 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9834 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9835 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
9836 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9837 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, iemCImpl_stmxcsr, iEffSeg, GCPtrEff);
9838 IEM_MC_END();
9839}
9840
9841
9842/**
9843 * @opmaps grp15
9844 * @opcode !11/4
9845 * @oppfx none
9846 * @opcpuid xsave
9847 * @opgroup og_system
9848 * @opxcpttype none
9849 */
9850FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm)
9851{
9852 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, M_RW, DISOPTYPE_HARMLESS, 0);
9853 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9854 IEMOP_RAISE_INVALID_OPCODE_RET();
9855
9856 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_CORE, 0);
9857 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9858 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9859 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9860 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9861 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9862 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
9863 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize);
9864 IEM_MC_END();
9865}
9866
9867
9868/**
9869 * @opmaps grp15
9870 * @opcode !11/5
9871 * @oppfx none
9872 * @opcpuid xsave
9873 * @opgroup og_system
9874 * @opxcpttype none
9875 */
9876FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm)
9877{
9878 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, M_RO, DISOPTYPE_HARMLESS, 0);
9879 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor)
9880 IEMOP_RAISE_INVALID_OPCODE_RET();
9881
9882 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_CORE, 0);
9883 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9884 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9885 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9886 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9887 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9888 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 2);
9889 IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize);
9890 IEM_MC_END();
9891}
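/* Both xsave and xrstor forward enmEffOpSize so the CIMPL can tell the
 * REX.W forms (XSAVE64/XRSTOR64) apart; those use the full 64-bit FPU
 * instruction/data pointer image in the legacy area. */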
9892
9893/** Opcode 0x0f 0xae mem/6. */
9894FNIEMOP_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);
9895
9896/**
9897 * @opmaps grp15
9898 * @opcode !11/7
9899 * @oppfx none
9900 * @opcpuid clfsh
9901 * @opgroup og_cachectl
9902 * @optest op1=1 ->
9903 */
9904FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm)
9905{
9906 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9907 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush)
9908 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9909
9910 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
9911 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9912 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9913 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9914 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9915 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9916 IEM_MC_END();
9917}
9918
9919/**
9920 * @opmaps grp15
9921 * @opcode !11/7
9922 * @oppfx 0x66
9923 * @opcpuid clflushopt
9924 * @opgroup og_cachectl
9925 * @optest op1=1 ->
9926 */
9927FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm)
9928{
9929 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, Mb_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
9930 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt)
9931 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm);
9932
9933 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
9934 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
9935 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
9936 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9937 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
9938 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT, iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff);
9939 IEM_MC_END();
9940}
9941
9942
9943/** Opcode 0x0f 0xae 11b/5. */
9944FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
9945{
9946 RT_NOREF_PV(bRm);
9947 IEMOP_MNEMONIC(lfence, "lfence");
9948 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
9949 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
9950#ifdef RT_ARCH_ARM64
9951 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
9952#else
9953 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9954 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
9955 else
9956 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9957#endif
9958 IEM_MC_ADVANCE_RIP_AND_FINISH();
9959 IEM_MC_END();
9960}
9961
9962
9963/** Opcode 0x0f 0xae 11b/6. */
9964FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
9965{
9966 RT_NOREF_PV(bRm);
9967 IEMOP_MNEMONIC(mfence, "mfence");
9968 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
9969 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
9970#ifdef RT_ARCH_ARM64
9971 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
9972#else
9973 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9974 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
9975 else
9976 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9977#endif
9978 IEM_MC_ADVANCE_RIP_AND_FINISH();
9979 IEM_MC_END();
9980}
9981
9982
9983/** Opcode 0x0f 0xae 11b/7. */
9984FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
9985{
9986 RT_NOREF_PV(bRm);
9987 IEMOP_MNEMONIC(sfence, "sfence");
9988 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
9989 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
9990#ifdef RT_ARCH_ARM64
9991 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
9992#else
9993 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2)
9994 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
9995 else
9996 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
9997#endif
9998 IEM_MC_ADVANCE_RIP_AND_FINISH();
9999 IEM_MC_END();
10000}
10001
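/* All three fences share one pattern: ARM64 hosts always use the native
 * helper, while x86 hosts lacking SSE2 fall back to
 * iemAImpl_alt_mem_fence, as the corresponding fence instructions cannot
 * be assumed to exist there. */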
10002
10003/** Opcode 0xf3 0x0f 0xae 11b/0. */
10004FNIEMOP_DEF_1(iemOp_Grp15_rdfsbase, uint8_t, bRm)
10005{
10006 IEMOP_MNEMONIC(rdfsbase, "rdfsbase Ry");
10007 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
10008 {
10009 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
10010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10011 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10012 IEM_MC_LOCAL(uint64_t, u64Dst);
10013 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_FS);
10014 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
10015 IEM_MC_ADVANCE_RIP_AND_FINISH();
10016 IEM_MC_END();
10017 }
10018 else
10019 {
10020 IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
10021 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10022 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10023 IEM_MC_LOCAL(uint32_t, u32Dst);
10024 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_FS);
10025 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
10026 IEM_MC_ADVANCE_RIP_AND_FINISH();
10027 IEM_MC_END();
10028 }
10029}
10030
10031
10032/** Opcode 0xf3 0x0f 0xae 11b/1. */
10033FNIEMOP_DEF_1(iemOp_Grp15_rdgsbase, uint8_t, bRm)
10034{
10035 IEMOP_MNEMONIC(rdgsbase, "rdgsbase Ry");
10036 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
10037 {
10038 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
10039 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10040 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10041 IEM_MC_LOCAL(uint64_t, u64Dst);
10042 IEM_MC_FETCH_SREG_BASE_U64(u64Dst, X86_SREG_GS);
10043 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Dst);
10044 IEM_MC_ADVANCE_RIP_AND_FINISH();
10045 IEM_MC_END();
10046 }
10047 else
10048 {
10049 IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
10050 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10051 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10052 IEM_MC_LOCAL(uint32_t, u32Dst);
10053 IEM_MC_FETCH_SREG_BASE_U32(u32Dst, X86_SREG_GS);
10054 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Dst);
10055 IEM_MC_ADVANCE_RIP_AND_FINISH();
10056 IEM_MC_END();
10057 }
10058}
10059
10060
10061/** Opcode 0xf3 0x0f 0xae 11b/2. */
10062FNIEMOP_DEF_1(iemOp_Grp15_wrfsbase, uint8_t, bRm)
10063{
10064 IEMOP_MNEMONIC(wrfsbase, "wrfsbase Ry");
10065 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
10066 {
10067 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
10068 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10069 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10070 IEM_MC_LOCAL(uint64_t, u64Dst);
10071 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10072 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
10073 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u64Dst);
10074 IEM_MC_ADVANCE_RIP_AND_FINISH();
10075 IEM_MC_END();
10076 }
10077 else
10078 {
10079 IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
10080 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10081 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10082 IEM_MC_LOCAL(uint32_t, u32Dst);
10083 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10084 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_FS, u32Dst);
10085 IEM_MC_ADVANCE_RIP_AND_FINISH();
10086 IEM_MC_END();
10087 }
10088}
10089
10090
10091/** Opcode 0xf3 0x0f 0xae 11b/3. */
10092FNIEMOP_DEF_1(iemOp_Grp15_wrgsbase, uint8_t, bRm)
10093{
10094 IEMOP_MNEMONIC(wrgsbase, "wrgsbase Ry");
10095 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
10096 {
10097 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
10098 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10099 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10100 IEM_MC_LOCAL(uint64_t, u64Dst);
10101 IEM_MC_FETCH_GREG_U64(u64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10102 IEM_MC_MAYBE_RAISE_NON_CANONICAL_ADDR_GP0(u64Dst);
10103 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u64Dst);
10104 IEM_MC_ADVANCE_RIP_AND_FINISH();
10105 IEM_MC_END();
10106 }
10107 else
10108 {
10109 IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
10110 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fFsGsBase);
10111 IEM_MC_MAYBE_RAISE_FSGSBASE_XCPT();
10112 IEM_MC_LOCAL(uint32_t, u32Dst);
10113 IEM_MC_FETCH_GREG_U32(u32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10114 IEM_MC_STORE_SREG_BASE_U64(X86_SREG_GS, u32Dst);
10115 IEM_MC_ADVANCE_RIP_AND_FINISH();
10116 IEM_MC_END();
10117 }
10118}
10119
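/* The four FSGSBASE accessors above are only reachable through the 0f3h
 * prefix column of the register-form table below. The write forms check
 * the new base for canonicality in 64-bit mode before committing, while
 * the 32-bit operand forms zero-extend into the 64-bit base. */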
10120
10121/**
10122 * Group 15 jump table for register variant.
10123 */
10124IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] =
10125{ /* pfx: none, 066h, 0f3h, 0f2h */
10126 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM,
10127 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM,
10128 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM,
10129 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM,
10130 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
10131 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10132 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10133 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10134};
10135AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4);
10136
10137
10138/**
10139 * Group 15 jump table for memory variant.
10140 */
10141IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] =
10142{ /* pfx: none, 066h, 0f3h, 0f2h */
10143 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10144 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10145 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10146 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10147 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10148 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10149 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10150 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
10151};
10152AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4);
10153
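/* Both tables are indexed by IEM_GET_MODRM_REG_8(bRm) * 4 + idxPrefix,
 * i.e. one row per ModR/M /r value with four columns for the mandatory
 * prefix variants (none, 066h, 0f3h, 0f2h). Example: an 0f3h-prefixed /0
 * in register mode lands on g_apfnGroup15RegReg[0 * 4 + 2], i.e.
 * iemOp_Grp15_rdfsbase. */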
10154
10155/** Opcode 0x0f 0xae. */
10156FNIEMOP_DEF(iemOp_Grp15)
10157{
10158 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. */
10159 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10160 if (IEM_IS_MODRM_REG_MODE(bRm))
10161 /* register, register */
10162 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10163 + pVCpu->iem.s.idxPrefix], bRm);
10164 /* memory, register */
10165 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
10166 + pVCpu->iem.s.idxPrefix], bRm);
10167}
10168
10169
10170/** Opcode 0x0f 0xaf. */
10171FNIEMOP_DEF(iemOp_imul_Gv_Ev)
10172{
10173 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev");
10174 IEMOP_HLP_MIN_386();
10175 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10176 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_eflags);
10177 IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1, IEM_MC_F_MIN_386);
10178}
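/* Two-operand imul architecturally defines only CF and OF; SF, ZF, AF
 * and PF are left undefined, hence the verification mask above. */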
10179
10180
10181/** Opcode 0x0f 0xb0. */
10182FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
10183{
10184 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb");
10185 IEMOP_HLP_MIN_486();
10186 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10187
10188 if (IEM_IS_MODRM_REG_MODE(bRm))
10189 {
10190 IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_486, 0);
10191 IEMOP_HLP_DONE_DECODING();
10192 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10193 IEM_MC_ARG(uint8_t *, pu8Al, 1);
10194 IEM_MC_ARG(uint8_t, u8Src, 2);
10195 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10196
10197 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10198 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10199 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
10200 IEM_MC_REF_EFLAGS(pEFlags);
10201 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10202 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
10203 else
10204 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
10205
10206 IEM_MC_ADVANCE_RIP_AND_FINISH();
10207 IEM_MC_END();
10208 }
10209 else
10210 {
10211 IEM_MC_BEGIN(4, 4, IEM_MC_F_MIN_486, 0);
10212 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10213 IEM_MC_ARG(uint8_t *, pu8Al, 1);
10214 IEM_MC_ARG(uint8_t, u8Src, 2);
10215 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10216 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10217 IEM_MC_LOCAL(uint8_t, u8Al);
10218 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
10219
10220 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10221 IEMOP_HLP_DONE_DECODING();
10222 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10223 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10224 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
10225 IEM_MC_FETCH_EFLAGS(EFlags);
10226 IEM_MC_REF_LOCAL(pu8Al, u8Al);
10227 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10228 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
10229 else
10230 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
10231
10232 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
10233 IEM_MC_COMMIT_EFLAGS(EFlags);
10234 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
10235 IEM_MC_ADVANCE_RIP_AND_FINISH();
10236 IEM_MC_END();
10237 }
10238}
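/* Note: the memory form always stores u8Al back to AL afterwards. On a
 * successful compare the value is unchanged, and on failure the helper
 * has loaded the memory operand into the local copy, matching the
 * architectural rule that the accumulator receives the destination. */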
10239
10240/** Opcode 0x0f 0xb1. */
10241FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
10242{
10243 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv");
10244 IEMOP_HLP_MIN_486();
10245 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10246
10247 if (IEM_IS_MODRM_REG_MODE(bRm))
10248 {
10249 switch (pVCpu->iem.s.enmEffOpSize)
10250 {
10251 case IEMMODE_16BIT:
10252 IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_486, 0);
10253 IEMOP_HLP_DONE_DECODING();
10254 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10255 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
10256 IEM_MC_ARG(uint16_t, u16Src, 2);
10257 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10258
10259 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10260 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10261 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
10262 IEM_MC_REF_EFLAGS(pEFlags);
10263 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10264 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
10265 else
10266 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
10267
10268 IEM_MC_ADVANCE_RIP_AND_FINISH();
10269 IEM_MC_END();
10270 break;
10271
10272 case IEMMODE_32BIT:
10273 IEM_MC_BEGIN(4, 0, IEM_MC_F_MIN_486, 0);
10274 IEMOP_HLP_DONE_DECODING();
10275 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10276 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
10277 IEM_MC_ARG(uint32_t, u32Src, 2);
10278 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10279
10280 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10281 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10282 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
10283 IEM_MC_REF_EFLAGS(pEFlags);
10284 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10285 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
10286 else
10287 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
10288
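                      /* cmpxchg semantics: on a match (ZF=1) the destination is written
                         with the source, otherwise (ZF=0) EAX receives the old destination
                         value; only the 32-bit register actually written has its upper
                         half cleared. */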
10289 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
10290 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
10291 } IEM_MC_ELSE() {
10292 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX);
10293 } IEM_MC_ENDIF();
10294
10295 IEM_MC_ADVANCE_RIP_AND_FINISH();
10296 IEM_MC_END();
10297 break;
10298
10299 case IEMMODE_64BIT:
10300 IEM_MC_BEGIN(4, 0, IEM_MC_F_64BIT, 0);
10301 IEMOP_HLP_DONE_DECODING();
10302 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10303 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
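                      /* On 32-bit (x86) hosts the assembly helper takes the 64-bit
                         source operand by reference rather than by value. */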
10304#ifdef RT_ARCH_X86
10305 IEM_MC_ARG(uint64_t *, pu64Src, 2);
10306#else
10307 IEM_MC_ARG(uint64_t, u64Src, 2);
10308#endif
10309 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10310
10311 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
10312 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
10313 IEM_MC_REF_EFLAGS(pEFlags);
10314#ifdef RT_ARCH_X86
10315 IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10316 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10317 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
10318 else
10319 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
10320#else
10321 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10322 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10323 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
10324 else
10325 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
10326#endif
10327
10328 IEM_MC_ADVANCE_RIP_AND_FINISH();
10329 IEM_MC_END();
10330 break;
10331
10332 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10333 }
10334 }
10335 else
10336 {
10337 switch (pVCpu->iem.s.enmEffOpSize)
10338 {
10339 case IEMMODE_16BIT:
10340 IEM_MC_BEGIN(4, 4, IEM_MC_F_MIN_486, 0);
10341 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10342 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
10343 IEM_MC_ARG(uint16_t, u16Src, 2);
10344 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10345 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10346 IEM_MC_LOCAL(uint16_t, u16Ax);
10347 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
10348
10349 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10350 IEMOP_HLP_DONE_DECODING();
10351 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10352 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10353 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
10354 IEM_MC_FETCH_EFLAGS(EFlags);
10355 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
10356 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10357 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
10358 else
10359 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
10360
10361 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
10362 IEM_MC_COMMIT_EFLAGS(EFlags);
10363 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
10364 IEM_MC_ADVANCE_RIP_AND_FINISH();
10365 IEM_MC_END();
10366 break;
10367
10368 case IEMMODE_32BIT:
10369 IEM_MC_BEGIN(4, 4, IEM_MC_F_MIN_486, 0);
10370 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10371 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
10372 IEM_MC_ARG(uint32_t, u32Src, 2);
10373 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10374 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10375 IEM_MC_LOCAL(uint32_t, u32Eax);
10376 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
10377
10378 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10379 IEMOP_HLP_DONE_DECODING();
10380 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10381 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10382 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
10383 IEM_MC_FETCH_EFLAGS(EFlags);
10384 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
10385 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10386 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
10387 else
10388 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
10389
10390 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
10391 IEM_MC_COMMIT_EFLAGS(EFlags);
10392
10393 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
10394 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
10395 } IEM_MC_ENDIF();
10396
10397 IEM_MC_ADVANCE_RIP_AND_FINISH();
10398 IEM_MC_END();
10399 break;
10400
10401 case IEMMODE_64BIT:
10402 IEM_MC_BEGIN(4, 4, IEM_MC_F_64BIT, 0);
10403 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10404 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
10405#ifdef RT_ARCH_X86
10406 IEM_MC_ARG(uint64_t *, pu64Src, 2);
10407#else
10408 IEM_MC_ARG(uint64_t, u64Src, 2);
10409#endif
10410 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
10411 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10412 IEM_MC_LOCAL(uint64_t, u64Rax);
10413 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
10414
10415 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10416 IEMOP_HLP_DONE_DECODING();
10417 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10418 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
10419 IEM_MC_FETCH_EFLAGS(EFlags);
10420 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
10421#ifdef RT_ARCH_X86
10422 IEM_MC_REF_GREG_U64(pu64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10423 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10424 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
10425 else
10426 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
10427#else
10428 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm));
10429 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10430 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
10431 else
10432 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
10433#endif
10434
10435 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
10436 IEM_MC_COMMIT_EFLAGS(EFlags);
10437 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
10438 IEM_MC_ADVANCE_RIP_AND_FINISH();
10439 IEM_MC_END();
10440 break;
10441
10442 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10443 }
10444 }
10445}
10446
10447
10448/** Opcode 0x0f 0xb2. */
10449FNIEMOP_DEF(iemOp_lss_Gv_Mp)
10450{
10451 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp");
10452 IEMOP_HLP_MIN_386();
10453 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10454 if (IEM_IS_MODRM_REG_MODE(bRm))
10455 IEMOP_RAISE_INVALID_OPCODE_RET();
10456 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
10457}
10458
10459
10460/** Opcode 0x0f 0xb3. */
10461FNIEMOP_DEF(iemOp_btr_Ev_Gv)
10462{
10463 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv");
10464 IEMOP_HLP_MIN_386();
10465 IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_btr_u16, iemAImpl_btr_u32, iemAImpl_btr_u64);
10466 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_btr_u16_locked, iemAImpl_btr_u32_locked, iemAImpl_btr_u64_locked);
10467}
10468
10469
10470/** Opcode 0x0f 0xb4. */
10471FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
10472{
10473 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp");
10474 IEMOP_HLP_MIN_386();
10475 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10476 if (IEM_IS_MODRM_REG_MODE(bRm))
10477 IEMOP_RAISE_INVALID_OPCODE_RET();
10478 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
10479}
10480
10481
10482/** Opcode 0x0f 0xb5. */
10483FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
10484{
10485 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp");
10486 IEMOP_HLP_MIN_386();
10487 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10488 if (IEM_IS_MODRM_REG_MODE(bRm))
10489 IEMOP_RAISE_INVALID_OPCODE_RET();
10490 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
10491}
10492
10493
10494/** Opcode 0x0f 0xb6. */
10495FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
10496{
10497 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
10498 IEMOP_HLP_MIN_386();
10499
10500 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10501
10502 /*
10503 * If rm is denoting a register, no more instruction bytes.
10504 */
10505 if (IEM_IS_MODRM_REG_MODE(bRm))
10506 {
10507 switch (pVCpu->iem.s.enmEffOpSize)
10508 {
10509 case IEMMODE_16BIT:
10510 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
10511 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10512 IEM_MC_LOCAL(uint16_t, u16Value);
10513 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10514 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10515 IEM_MC_ADVANCE_RIP_AND_FINISH();
10516 IEM_MC_END();
10517 break;
10518
10519 case IEMMODE_32BIT:
10520 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
10521 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10522 IEM_MC_LOCAL(uint32_t, u32Value);
10523 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10524 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10525 IEM_MC_ADVANCE_RIP_AND_FINISH();
10526 IEM_MC_END();
10527 break;
10528
10529 case IEMMODE_64BIT:
10530 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
10531 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10532 IEM_MC_LOCAL(uint64_t, u64Value);
10533 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10534 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10535 IEM_MC_ADVANCE_RIP_AND_FINISH();
10536 IEM_MC_END();
10537 break;
10538
10539 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10540 }
10541 }
10542 else
10543 {
10544 /*
10545 * We're loading a register from memory.
10546 */
10547 switch (pVCpu->iem.s.enmEffOpSize)
10548 {
10549 case IEMMODE_16BIT:
10550 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
10551 IEM_MC_LOCAL(uint16_t, u16Value);
10552 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10553 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10554 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10555 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10556 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
10557 IEM_MC_ADVANCE_RIP_AND_FINISH();
10558 IEM_MC_END();
10559 break;
10560
10561 case IEMMODE_32BIT:
10562 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
10563 IEM_MC_LOCAL(uint32_t, u32Value);
10564 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10565 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10566 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10567 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10568 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10569 IEM_MC_ADVANCE_RIP_AND_FINISH();
10570 IEM_MC_END();
10571 break;
10572
10573 case IEMMODE_64BIT:
10574 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
10575 IEM_MC_LOCAL(uint64_t, u64Value);
10576 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10577 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10578 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10579 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10580 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10581 IEM_MC_ADVANCE_RIP_AND_FINISH();
10582 IEM_MC_END();
10583 break;
10584
10585 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10586 }
10587 }
10588}
10589
10590
10591/** Opcode 0x0f 0xb7. */
10592FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
10593{
10594 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew");
10595 IEMOP_HLP_MIN_386();
10596
10597 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10598
10599 /** @todo Not entirely sure how the operand size prefix is handled here,
10600 * assuming that it will be ignored. Would be nice to have a few
10601 * tests for this. */
10602
10603 /** @todo There should be no difference in the behaviour whether REX.W is
10604 * present or not... */
10605
10606 /*
10607 * If rm is denoting a register, no more instruction bytes.
10608 */
10609 if (IEM_IS_MODRM_REG_MODE(bRm))
10610 {
10611 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10612 {
10613 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
10614 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10615 IEM_MC_LOCAL(uint32_t, u32Value);
10616 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10617 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10618 IEM_MC_ADVANCE_RIP_AND_FINISH();
10619 IEM_MC_END();
10620 }
10621 else
10622 {
10623 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
10624 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10625 IEM_MC_LOCAL(uint64_t, u64Value);
10626 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
10627 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10628 IEM_MC_ADVANCE_RIP_AND_FINISH();
10629 IEM_MC_END();
10630 }
10631 }
10632 else
10633 {
10634 /*
10635 * We're loading a register from memory.
10636 */
10637 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
10638 {
10639 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
10640 IEM_MC_LOCAL(uint32_t, u32Value);
10641 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10642 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10643 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10644 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10645 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
10646 IEM_MC_ADVANCE_RIP_AND_FINISH();
10647 IEM_MC_END();
10648 }
10649 else
10650 {
10651 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
10652 IEM_MC_LOCAL(uint64_t, u64Value);
10653 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10654 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10656 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10657 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
10658 IEM_MC_ADVANCE_RIP_AND_FINISH();
10659 IEM_MC_END();
10660 }
10661 }
10662}
10663
10664
10665/** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */
10666FNIEMOP_UD_STUB(iemOp_jmpe);
10667
10668
10669/** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */
10670FNIEMOP_DEF(iemOp_popcnt_Gv_Ev)
10671{
10672 IEMOP_MNEMONIC2(RM, POPCNT, popcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
10673 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fPopCnt)
10674 return iemOp_InvalidNeedRM(pVCpu);
10675#ifndef TST_IEM_CHECK_MC
10676# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
10677 static const IEMOPBINSIZES s_Native =
10678 { NULL, NULL, iemAImpl_popcnt_u16, NULL, iemAImpl_popcnt_u32, NULL, iemAImpl_popcnt_u64, NULL };
10679# endif
10680 static const IEMOPBINSIZES s_Fallback =
10681 { NULL, NULL, iemAImpl_popcnt_u16_fallback, NULL, iemAImpl_popcnt_u32_fallback, NULL, iemAImpl_popcnt_u64_fallback, NULL };
10682#endif
10683 const IEMOPBINSIZES * const pImpl = IEM_SELECT_HOST_OR_FALLBACK(fPopCnt, &s_Native, &s_Fallback);
10684 IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1, IEM_MC_F_NOT_286_OR_OLDER);
10685}
10686
10687
10688/**
10689 * @opcode 0xb9
10690 * @opinvalid intel-modrm
10691 * @optest ->
10692 */
10693FNIEMOP_DEF(iemOp_Grp10)
10694{
10695 /*
10696 * AMD does not decode beyond the 0xb9 opcode byte, whereas intel decodes the
10697 * modr/m byte too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM.
10698 */
10699 Log(("iemOp_Grp10 aka UD1 -> #UD\n"));
10700 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZES); /* just picked Gb,Eb here. */
10701 return FNIEMOP_CALL(iemOp_InvalidNeedRM);
10702}
10703
10704
10705/**
10706 * Body for the group 8 bit instructions with an immediate operand.
10707 */
10708#define IEMOP_BODY_BIT_Ev_Ib_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
10709 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
10710 \
10711 if (IEM_IS_MODRM_REG_MODE(bRm)) \
10712 { \
10713 /* register destination. */ \
10714 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10715 \
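          /* For a register destination the immediate bit offset wraps modulo \
             the operand width, hence the 0x0f/0x1f/0x3f masking below. */ \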
10716 switch (pVCpu->iem.s.enmEffOpSize) \
10717 { \
10718 case IEMMODE_16BIT: \
10719 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
10720 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10721 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
10722 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); \
10723 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10724 \
10725 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10726 IEM_MC_REF_EFLAGS(pEFlags); \
10727 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
10728 \
10729 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10730 IEM_MC_END(); \
10731 break; \
10732 \
10733 case IEMMODE_32BIT: \
10734 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
10735 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10736 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
10737 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); \
10738 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10739 \
10740 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10741 IEM_MC_REF_EFLAGS(pEFlags); \
10742 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
10743 \
10744 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
10745 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10746 IEM_MC_END(); \
10747 break; \
10748 \
10749 case IEMMODE_64BIT: \
10750 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
10751 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10752 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
10753 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); \
10754 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10755 \
10756 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10757 IEM_MC_REF_EFLAGS(pEFlags); \
10758 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
10759 \
10760 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10761 IEM_MC_END(); \
10762 break; \
10763 \
10764 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10765 } \
10766 } \
10767 else \
10768 { \
10769 /* memory destination. */ \
10770 /** @todo test negative bit offsets! */ \
10771 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
10772 { \
10773 switch (pVCpu->iem.s.enmEffOpSize) \
10774 { \
10775 case IEMMODE_16BIT: \
10776 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
10777 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10778 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10779 \
10780 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10781 IEMOP_HLP_DONE_DECODING(); \
10782 \
10783 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10784 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
10785 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10786 \
10787 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); \
10788 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10789 IEM_MC_FETCH_EFLAGS(EFlags); \
10790 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
10791 \
10792 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
10793 IEM_MC_COMMIT_EFLAGS(EFlags); \
10794 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10795 IEM_MC_END(); \
10796 break; \
10797 \
10798 case IEMMODE_32BIT: \
10799 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
10800 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10801 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10802 \
10803 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10804 IEMOP_HLP_DONE_DECODING(); \
10805 \
10806 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10807 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
10808 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10809 \
10810 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); \
10811 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10812 IEM_MC_FETCH_EFLAGS(EFlags); \
10813 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
10814 \
10815 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
10816 IEM_MC_COMMIT_EFLAGS(EFlags); \
10817 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10818 IEM_MC_END(); \
10819 break; \
10820 \
10821 case IEMMODE_64BIT: \
10822 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
10823 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10824 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10825 \
10826 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10827 IEMOP_HLP_DONE_DECODING(); \
10828 \
10829 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10830 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
10831 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10832 \
10833 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); \
10834 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10835 IEM_MC_FETCH_EFLAGS(EFlags); \
10836 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
10837 \
10838 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
10839 IEM_MC_COMMIT_EFLAGS(EFlags); \
10840 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10841 IEM_MC_END(); \
10842 break; \
10843 \
10844 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10845 } \
10846 } \
10847 else \
10848 { \
10849 (void)0
10850/* Separate macro to work around parsing issue in IEMAllInstPython.py */
10851#define IEMOP_BODY_BIT_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
10852 switch (pVCpu->iem.s.enmEffOpSize) \
10853 { \
10854 case IEMMODE_16BIT: \
10855 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
10856 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10857 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10858 \
10859 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10860 IEMOP_HLP_DONE_DECODING(); \
10861 \
10862 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
10863 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10864 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10865 \
10866 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); \
10867 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10868 IEM_MC_FETCH_EFLAGS(EFlags); \
10869 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
10870 \
10871 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
10872 IEM_MC_COMMIT_EFLAGS(EFlags); \
10873 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10874 IEM_MC_END(); \
10875 break; \
10876 \
10877 case IEMMODE_32BIT: \
10878 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
10879 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10880 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10881 \
10882 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10883 IEMOP_HLP_DONE_DECODING(); \
10884 \
10885 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10886 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
10887 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10888 \
10889 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); \
10890 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10891 IEM_MC_FETCH_EFLAGS(EFlags); \
10892 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
10893 \
10894 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
10895 IEM_MC_COMMIT_EFLAGS(EFlags); \
10896 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10897 IEM_MC_END(); \
10898 break; \
10899 \
10900 case IEMMODE_64BIT: \
10901 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
10902 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10903 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
10904 \
10905 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10906 IEMOP_HLP_DONE_DECODING(); \
10907 \
10908 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
10909 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
10910 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
10911 \
10912 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); \
10913 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
10914 IEM_MC_FETCH_EFLAGS(EFlags); \
10915 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
10916 \
10917 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
10918 IEM_MC_COMMIT_EFLAGS(EFlags); \
10919 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10920 IEM_MC_END(); \
10921 break; \
10922 \
10923 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10924 } \
10925 } \
10926 } \
10927 (void)0
10928
10929/* Read-only version (bt) */
10930#define IEMOP_BODY_BIT_Ev_Ib_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
10931 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); \
10932 \
10933 if (IEM_IS_MODRM_REG_MODE(bRm)) \
10934 { \
10935 /* register destination. */ \
10936 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
10937 \
10938 switch (pVCpu->iem.s.enmEffOpSize) \
10939 { \
10940 case IEMMODE_16BIT: \
10941 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
10942 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10943 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
10944 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); \
10945 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10946 \
10947 IEM_MC_REF_GREG_U16_CONST(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10948 IEM_MC_REF_EFLAGS(pEFlags); \
10949 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
10950 \
10951 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10952 IEM_MC_END(); \
10953 break; \
10954 \
10955 case IEMMODE_32BIT: \
10956 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
10957 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10958 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
10959 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); \
10960 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10961 \
10962 IEM_MC_REF_GREG_U32_CONST(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10963 IEM_MC_REF_EFLAGS(pEFlags); \
10964 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
10965 \
10966 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10967 IEM_MC_END(); \
10968 break; \
10969 \
10970 case IEMMODE_64BIT: \
10971 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
10972 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
10973 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
10974 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); \
10975 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
10976 \
10977 IEM_MC_REF_GREG_U64_CONST(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
10978 IEM_MC_REF_EFLAGS(pEFlags); \
10979 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
10980 \
10981 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
10982 IEM_MC_END(); \
10983 break; \
10984 \
10985 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10986 } \
10987 } \
10988 else \
10989 { \
10990 /* memory destination. */ \
10991 /** @todo test negative bit offsets! */ \
10992 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
10993 { \
10994 switch (pVCpu->iem.s.enmEffOpSize) \
10995 { \
10996 case IEMMODE_16BIT: \
10997 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
10998 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
10999 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
11000 \
11001 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
11002 IEMOP_HLP_DONE_DECODING(); \
11003 \
11004 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11005 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
11006 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11007 \
11008 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ bImm & 0x0f, 1); \
11009 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11010 IEM_MC_FETCH_EFLAGS(EFlags); \
11011 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
11012 \
11013 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
11014 IEM_MC_COMMIT_EFLAGS(EFlags); \
11015 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11016 IEM_MC_END(); \
11017 break; \
11018 \
11019 case IEMMODE_32BIT: \
11020 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
11021 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11022 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
11023 \
11024 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
11025 IEMOP_HLP_DONE_DECODING(); \
11026 \
11027 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11028 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
11029 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11030 \
11031 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ bImm & 0x1f, 1); \
11032 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11033 IEM_MC_FETCH_EFLAGS(EFlags); \
11034 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
11035 \
11036 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
11037 IEM_MC_COMMIT_EFLAGS(EFlags); \
11038 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11039 IEM_MC_END(); \
11040 break; \
11041 \
11042 case IEMMODE_64BIT: \
11043 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
11044 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11045 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
11046 \
11047 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
11048 IEMOP_HLP_DONE_DECODING(); \
11049 \
11050 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
11051 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
11052 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
11053 \
11054 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ bImm & 0x3f, 1); \
11055 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
11056 IEM_MC_FETCH_EFLAGS(EFlags); \
11057 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
11058 \
11059 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
11060 IEM_MC_COMMIT_EFLAGS(EFlags); \
11061 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11062 IEM_MC_END(); \
11063 break; \
11064 \
11065 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11066 } \
11067 } \
11068 else \
11069 { \
11070 IEMOP_HLP_DONE_DECODING(); \
11071 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
11072 } \
11073 } \
11074 (void)0
11075
11076
11077/** Opcode 0x0f 0xba /4. */
11078FNIEMOPRM_DEF(iemOp_Grp8_bt_Ev_Ib)
11079{
11080 IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib");
11081 IEMOP_BODY_BIT_Ev_Ib_RO(iemAImpl_bt_u16, iemAImpl_bt_u32, iemAImpl_bt_u64);
11082}
11083
11084
11085/** Opcode 0x0f 0xba /5. */
11086FNIEMOPRM_DEF(iemOp_Grp8_bts_Ev_Ib)
11087{
11088 IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib");
11089 IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_bts_u16, iemAImpl_bts_u32, iemAImpl_bts_u64);
11090 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_bts_u16_locked, iemAImpl_bts_u32_locked, iemAImpl_bts_u64_locked);
11091}
11092
11093
11094/** Opcode 0x0f 0xba /6. */
11095FNIEMOPRM_DEF(iemOp_Grp8_btr_Ev_Ib)
11096{
11097 IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib");
11098 IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_btr_u16, iemAImpl_btr_u32, iemAImpl_btr_u64);
11099 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_btr_u16_locked, iemAImpl_btr_u32_locked, iemAImpl_btr_u64_locked);
11100}
11101
11102
11103/** Opcode 0x0f 0xba /7. */
11104FNIEMOPRM_DEF(iemOp_Grp8_btc_Ev_Ib)
11105{
11106 IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib");
11107 IEMOP_BODY_BIT_Ev_Ib_RW( iemAImpl_btc_u16, iemAImpl_btc_u32, iemAImpl_btc_u64);
11108 IEMOP_BODY_BIT_Ev_Ib_LOCKED(iemAImpl_btc_u16_locked, iemAImpl_btc_u32_locked, iemAImpl_btc_u64_locked);
11109}
11110
11111
11112/** Opcode 0x0f 0xba. */
11113FNIEMOP_DEF(iemOp_Grp8)
11114{
11115 IEMOP_HLP_MIN_386();
11116 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11117 switch (IEM_GET_MODRM_REG_8(bRm))
11118 {
11119 case 4: return FNIEMOP_CALL_1(iemOp_Grp8_bt_Ev_Ib, bRm);
11120 case 5: return FNIEMOP_CALL_1(iemOp_Grp8_bts_Ev_Ib, bRm);
11121 case 6: return FNIEMOP_CALL_1(iemOp_Grp8_btr_Ev_Ib, bRm);
11122 case 7: return FNIEMOP_CALL_1(iemOp_Grp8_btc_Ev_Ib, bRm);
11123
11124 case 0: case 1: case 2: case 3:
11125 /* Both AMD and Intel want full modr/m decoding and imm8. */
11126 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm);
11127
11128 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11129 }
11130}
11131
11132
11133/** Opcode 0x0f 0xbb. */
11134FNIEMOP_DEF(iemOp_btc_Ev_Gv)
11135{
11136 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv");
11137 IEMOP_HLP_MIN_386();
11138 IEMOP_BODY_BIT_Ev_Gv_RW( iemAImpl_btc_u16, iemAImpl_btc_u32, iemAImpl_btc_u64);
11139 IEMOP_BODY_BIT_Ev_Gv_LOCKED(iemAImpl_btc_u16_locked, iemAImpl_btc_u32_locked, iemAImpl_btc_u64_locked);
11140}
11141
11142
11143/**
11144 * Common worker for BSF and BSR instructions.
11145 *
11146 * These cannot use iemOpHlpBinaryOperator_rv_rm because they don't always write
11147 * the destination register, which means that for 32-bit operations the high
11148 * bits must be left alone.
11149 *
11150 * @param pImpl Pointer to the instruction implementation (assembly).
11151 */
11152FNIEMOP_DEF_1(iemOpHlpBitScanOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
11153{
11154 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11155
11156 /*
11157 * If rm is denoting a register, no more instruction bytes.
11158 */
11159 if (IEM_IS_MODRM_REG_MODE(bRm))
11160 {
11161 switch (pVCpu->iem.s.enmEffOpSize)
11162 {
11163 case IEMMODE_16BIT:
11164 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
11165 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11166 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11167 IEM_MC_ARG(uint16_t, u16Src, 1);
11168 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11169
11170 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11171 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11172 IEM_MC_REF_EFLAGS(pEFlags);
11173 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
11174
11175 IEM_MC_ADVANCE_RIP_AND_FINISH();
11176 IEM_MC_END();
11177 break;
11178
11179 case IEMMODE_32BIT:
11180 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
11181 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11182 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11183 IEM_MC_ARG(uint32_t, u32Src, 1);
11184 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11185
11186 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11187 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11188 IEM_MC_REF_EFLAGS(pEFlags);
11189 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
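                      /* The destination is only written when a bit was found (ZF=0),
                         so only then may the upper half of the 64-bit register be cleared. */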
11190 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
11191 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
11192 } IEM_MC_ENDIF();
11193 IEM_MC_ADVANCE_RIP_AND_FINISH();
11194 IEM_MC_END();
11195 break;
11196
11197 case IEMMODE_64BIT:
11198 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
11199 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11200 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11201 IEM_MC_ARG(uint64_t, u64Src, 1);
11202 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11203
11204 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
11205 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11206 IEM_MC_REF_EFLAGS(pEFlags);
11207 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
11208
11209 IEM_MC_ADVANCE_RIP_AND_FINISH();
11210 IEM_MC_END();
11211 break;
11212
11213 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11214 }
11215 }
11216 else
11217 {
11218 /*
11219 * We're accessing memory.
11220 */
11221 switch (pVCpu->iem.s.enmEffOpSize)
11222 {
11223 case IEMMODE_16BIT:
11224 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
11225 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11226 IEM_MC_ARG(uint16_t, u16Src, 1);
11227 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11228 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11229
11230 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11231 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11232 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11233 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11234 IEM_MC_REF_EFLAGS(pEFlags);
11235 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
11236
11237 IEM_MC_ADVANCE_RIP_AND_FINISH();
11238 IEM_MC_END();
11239 break;
11240
11241 case IEMMODE_32BIT:
11242 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
11243 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11244 IEM_MC_ARG(uint32_t, u32Src, 1);
11245 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11246 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11247
11248 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11249 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11250 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11251 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11252 IEM_MC_REF_EFLAGS(pEFlags);
11253 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
11254
11255 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
11256 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
11257 } IEM_MC_ENDIF();
11258 IEM_MC_ADVANCE_RIP_AND_FINISH();
11259 IEM_MC_END();
11260 break;
11261
11262 case IEMMODE_64BIT:
11263 IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
11264 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11265 IEM_MC_ARG(uint64_t, u64Src, 1);
11266 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11267 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11268
11269 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11270 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11271 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11272 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm));
11273 IEM_MC_REF_EFLAGS(pEFlags);
11274 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
11275
11276 IEM_MC_ADVANCE_RIP_AND_FINISH();
11277 IEM_MC_END();
11278 break;
11279
11280 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11281 }
11282 }
11283}
11284
11285
11286/** Opcode 0x0f 0xbc. */
11287FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
11288{
11289 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev");
11290 IEMOP_HLP_MIN_386();
11291 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
11292 return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsf_eflags));
11293}
11294
11295
11296/** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */
11297FNIEMOP_DEF(iemOp_tzcnt_Gv_Ev)
11298{
11299 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
11300 return FNIEMOP_CALL(iemOp_bsf_Gv_Ev);
11301 IEMOP_MNEMONIC2(RM, TZCNT, tzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
11302
11303#ifndef TST_IEM_CHECK_MC
11304 static const IEMOPBINSIZES s_iemAImpl_tzcnt =
11305 { NULL, NULL, iemAImpl_tzcnt_u16, NULL, iemAImpl_tzcnt_u32, NULL, iemAImpl_tzcnt_u64, NULL };
11306 static const IEMOPBINSIZES s_iemAImpl_tzcnt_amd =
11307 { NULL, NULL, iemAImpl_tzcnt_u16_amd, NULL, iemAImpl_tzcnt_u32_amd, NULL, iemAImpl_tzcnt_u64_amd, NULL };
11308 static const IEMOPBINSIZES s_iemAImpl_tzcnt_intel =
11309 { NULL, NULL, iemAImpl_tzcnt_u16_intel, NULL, iemAImpl_tzcnt_u32_intel, NULL, iemAImpl_tzcnt_u64_intel, NULL };
11310 static const IEMOPBINSIZES * const s_iemAImpl_tzcnt_eflags[2][4] =
11311 {
11312 { &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt_intel },
11313 { &s_iemAImpl_tzcnt, &s_iemAImpl_tzcnt_intel, &s_iemAImpl_tzcnt_amd, &s_iemAImpl_tzcnt }
11314 };
11315#endif
11316 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
11317 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_tzcnt_eflags,
11318 IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1);
11319 IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1, IEM_MC_F_NOT_286_OR_OLDER);
11320}
11321
11322
11323/** Opcode 0x0f 0xbd. */
11324FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
11325{
11326 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev");
11327 IEMOP_HLP_MIN_386();
11328 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
11329 return FNIEMOP_CALL_1(iemOpHlpBitScanOperator_rv_rm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_bsr_eflags));
11330}
11331
11332
11333/** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */
11334FNIEMOP_DEF(iemOp_lzcnt_Gv_Ev)
11335{
11336 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
11337 return FNIEMOP_CALL(iemOp_bsr_Gv_Ev);
11338 IEMOP_MNEMONIC2(RM, LZCNT, lzcnt, Gv, Ev, DISOPTYPE_HARMLESS, 0);
11339
11340#ifndef TST_IEM_CHECK_MC
11341 static const IEMOPBINSIZES s_iemAImpl_lzcnt =
11342 { NULL, NULL, iemAImpl_lzcnt_u16, NULL, iemAImpl_lzcnt_u32, NULL, iemAImpl_lzcnt_u64, NULL };
11343 static const IEMOPBINSIZES s_iemAImpl_lzcnt_amd =
11344 { NULL, NULL, iemAImpl_lzcnt_u16_amd, NULL, iemAImpl_lzcnt_u32_amd, NULL, iemAImpl_lzcnt_u64_amd, NULL };
11345 static const IEMOPBINSIZES s_iemAImpl_lzcnt_intel =
11346 { NULL, NULL, iemAImpl_lzcnt_u16_intel, NULL, iemAImpl_lzcnt_u32_intel, NULL, iemAImpl_lzcnt_u64_intel, NULL };
11347 static const IEMOPBINSIZES * const s_iemAImpl_lzcnt_eflags[2][4] =
11348 {
11349 { &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt_intel },
11350 { &s_iemAImpl_lzcnt, &s_iemAImpl_lzcnt_intel, &s_iemAImpl_lzcnt_amd, &s_iemAImpl_lzcnt }
11351 };
11352#endif
11353 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
11354 const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(s_iemAImpl_lzcnt_eflags,
11355 IEM_GET_HOST_CPU_FEATURES(pVCpu)->fBmi1);
11356 IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1, IEM_MC_F_NOT_286_OR_OLDER);
11357}
11358
11359
11360
11361/** Opcode 0x0f 0xbe. */
11362FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
11363{
11364 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb");
11365 IEMOP_HLP_MIN_386();
11366
11367 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11368
11369 /*
11370 * If rm is denoting a register, no more instruction bytes.
11371 */
11372 if (IEM_IS_MODRM_REG_MODE(bRm))
11373 {
11374 switch (pVCpu->iem.s.enmEffOpSize)
11375 {
11376 case IEMMODE_16BIT:
11377 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
11378 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11379 IEM_MC_LOCAL(uint16_t, u16Value);
11380 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11381 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
11382 IEM_MC_ADVANCE_RIP_AND_FINISH();
11383 IEM_MC_END();
11384 break;
11385
11386 case IEMMODE_32BIT:
11387 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
11388 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11389 IEM_MC_LOCAL(uint32_t, u32Value);
11390 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11391 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11392 IEM_MC_ADVANCE_RIP_AND_FINISH();
11393 IEM_MC_END();
11394 break;
11395
11396 case IEMMODE_64BIT:
11397 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
11398 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11399 IEM_MC_LOCAL(uint64_t, u64Value);
11400 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11401 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11402 IEM_MC_ADVANCE_RIP_AND_FINISH();
11403 IEM_MC_END();
11404 break;
11405
11406 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11407 }
11408 }
11409 else
11410 {
11411 /*
11412 * We're loading a register from memory.
11413 */
11414 switch (pVCpu->iem.s.enmEffOpSize)
11415 {
11416 case IEMMODE_16BIT:
11417 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
11418 IEM_MC_LOCAL(uint16_t, u16Value);
11419 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11420 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11421 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11422 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11423 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
11424 IEM_MC_ADVANCE_RIP_AND_FINISH();
11425 IEM_MC_END();
11426 break;
11427
11428 case IEMMODE_32BIT:
11429 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
11430 IEM_MC_LOCAL(uint32_t, u32Value);
11431 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11432 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11433 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11434 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11435 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11436 IEM_MC_ADVANCE_RIP_AND_FINISH();
11437 IEM_MC_END();
11438 break;
11439
11440 case IEMMODE_64BIT:
11441 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
11442 IEM_MC_LOCAL(uint64_t, u64Value);
11443 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11444 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11445 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11446 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11447 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11448 IEM_MC_ADVANCE_RIP_AND_FINISH();
11449 IEM_MC_END();
11450 break;
11451
11452 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11453 }
11454 }
11455}
11456
11457
11458/** Opcode 0x0f 0xbf. */
11459FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
11460{
11461 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew");
11462 IEMOP_HLP_MIN_386();
11463
11464 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11465
11466 /** @todo Not entirely sure how the operand size prefix is handled here,
11467 * assuming that it will be ignored. Would be nice to have a few
11468 * tests for this. */
11469 /*
11470 * If rm is denoting a register, no more instruction bytes.
11471 */
11472 if (IEM_IS_MODRM_REG_MODE(bRm))
11473 {
11474 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11475 {
11476 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
11477 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11478 IEM_MC_LOCAL(uint32_t, u32Value);
11479 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11480 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11481 IEM_MC_ADVANCE_RIP_AND_FINISH();
11482 IEM_MC_END();
11483 }
11484 else
11485 {
11486 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
11487 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11488 IEM_MC_LOCAL(uint64_t, u64Value);
11489 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11490 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11491 IEM_MC_ADVANCE_RIP_AND_FINISH();
11492 IEM_MC_END();
11493 }
11494 }
11495 else
11496 {
11497 /*
11498 * We're loading a register from memory.
11499 */
11500 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
11501 {
11502 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
11503 IEM_MC_LOCAL(uint32_t, u32Value);
11504 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11505 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11506 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11507 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11508 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
11509 IEM_MC_ADVANCE_RIP_AND_FINISH();
11510 IEM_MC_END();
11511 }
11512 else
11513 {
11514 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
11515 IEM_MC_LOCAL(uint64_t, u64Value);
11516 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11517 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11518 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11519 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11520 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
11521 IEM_MC_ADVANCE_RIP_AND_FINISH();
11522 IEM_MC_END();
11523 }
11524 }
11525}
11526
11527
11528/** Opcode 0x0f 0xc0. */
11529FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
11530{
11531 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11532 IEMOP_HLP_MIN_486();
11533 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb");
11534
11535 /*
11536 * If rm is denoting a register, no more instruction bytes.
11537 */
11538 if (IEM_IS_MODRM_REG_MODE(bRm))
11539 {
11540 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_486, 0);
11541 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11542 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
11543 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
11544 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11545
11546 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11547 IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11548 IEM_MC_REF_EFLAGS(pEFlags);
11549 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
11550
11551 IEM_MC_ADVANCE_RIP_AND_FINISH();
11552 IEM_MC_END();
11553 }
11554 else
11555 {
11556 /*
11557 * We're accessing memory.
11558 */
11559 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_486, 0);
11560 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
11561 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
11562 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
11563 IEM_MC_LOCAL(uint8_t, u8RegCopy);
11564 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11565 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
11566
11567 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11568 IEMOP_HLP_DONE_DECODING();
11569 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
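          /* The helper updates a stack copy of the register operand; the old
             destination value left there is written back to the register once
             the memory operand has been committed. */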
11570 IEM_MC_FETCH_GREG_U8(u8RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
11571 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
11572 IEM_MC_FETCH_EFLAGS(EFlags);
11573 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11574 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
11575 else
11576 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
11577
11578 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
11579 IEM_MC_COMMIT_EFLAGS(EFlags);
11580 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8RegCopy);
11581 IEM_MC_ADVANCE_RIP_AND_FINISH();
11582 IEM_MC_END();
11583 }
11584}
11585
11586
11587/** Opcode 0x0f 0xc1. */
11588FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
11589{
11590 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv");
11591 IEMOP_HLP_MIN_486();
11592 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11593
11594 /*
11595 * If rm is denoting a register, no more instruction bytes.
11596 */
11597 if (IEM_IS_MODRM_REG_MODE(bRm))
11598 {
11599 switch (pVCpu->iem.s.enmEffOpSize)
11600 {
11601 case IEMMODE_16BIT:
11602 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_486, 0);
11603 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11604 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11605 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
11606 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11607
11608 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11609 IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11610 IEM_MC_REF_EFLAGS(pEFlags);
11611 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
11612
11613 IEM_MC_ADVANCE_RIP_AND_FINISH();
11614 IEM_MC_END();
11615 break;
11616
11617 case IEMMODE_32BIT:
11618 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_486, 0);
11619 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11620 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11621 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
11622 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11623
11624 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11625 IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11626 IEM_MC_REF_EFLAGS(pEFlags);
11627 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
11628
11629 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
11630 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
11631 IEM_MC_ADVANCE_RIP_AND_FINISH();
11632 IEM_MC_END();
11633 break;
11634
11635 case IEMMODE_64BIT:
11636 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
11637 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11638 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11639 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
11640 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11641
11642 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11643 IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
11644 IEM_MC_REF_EFLAGS(pEFlags);
11645 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
11646
11647 IEM_MC_ADVANCE_RIP_AND_FINISH();
11648 IEM_MC_END();
11649 break;
11650
11651 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11652 }
11653 }
11654 else
11655 {
11656 /*
11657 * We're accessing memory.
11658 */
11659 switch (pVCpu->iem.s.enmEffOpSize)
11660 {
11661 case IEMMODE_16BIT:
11662 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_486, 0);
11663 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
11664 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
11665 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
11666 IEM_MC_LOCAL(uint16_t, u16RegCopy);
11667 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11668 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
11669
11670 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11671 IEMOP_HLP_DONE_DECODING();
11672 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11673 IEM_MC_FETCH_GREG_U16(u16RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
11674 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
11675 IEM_MC_FETCH_EFLAGS(EFlags);
11676 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11677 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
11678 else
11679 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);
11680
11681 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
11682 IEM_MC_COMMIT_EFLAGS(EFlags);
11683 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16RegCopy);
11684 IEM_MC_ADVANCE_RIP_AND_FINISH();
11685 IEM_MC_END();
11686 break;
11687
11688 case IEMMODE_32BIT:
11689 IEM_MC_BEGIN(3, 4, IEM_MC_F_MIN_486, 0);
11690 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11691 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
11692 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
11693 IEM_MC_LOCAL(uint32_t, u32RegCopy);
11694 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11695 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
11696
11697 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11698 IEMOP_HLP_DONE_DECODING();
11699 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11700 IEM_MC_FETCH_GREG_U32(u32RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
11701 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
11702 IEM_MC_FETCH_EFLAGS(EFlags);
11703 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11704 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
11705 else
11706 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);
11707
11708 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
11709 IEM_MC_COMMIT_EFLAGS(EFlags);
11710 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32RegCopy);
11711 IEM_MC_ADVANCE_RIP_AND_FINISH();
11712 IEM_MC_END();
11713 break;
11714
11715 case IEMMODE_64BIT:
11716 IEM_MC_BEGIN(3, 4, IEM_MC_F_64BIT, 0);
11717 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11718 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
11719 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
11720 IEM_MC_LOCAL(uint64_t, u64RegCopy);
11721 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11722 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
11723
11724 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11725 IEMOP_HLP_DONE_DECODING();
11726 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11727 IEM_MC_FETCH_GREG_U64(u64RegCopy, IEM_GET_MODRM_REG(pVCpu, bRm));
11728 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
11729 IEM_MC_FETCH_EFLAGS(EFlags);
11730 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
11731 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
11732 else
11733 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);
11734
11735 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
11736 IEM_MC_COMMIT_EFLAGS(EFlags);
11737 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64RegCopy);
11738 IEM_MC_ADVANCE_RIP_AND_FINISH();
11739 IEM_MC_END();
11740 break;
11741
11742 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11743 }
11744 }
11745}
11746
11747
11748/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
11749FNIEMOP_DEF(iemOp_cmpps_Vps_Wps_Ib)
11750{
11751 IEMOP_MNEMONIC3(RMI, CMPPS, cmpps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11752
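      /* The imm8 selects the compare predicate (0=EQ, 1=LT, 2=LE, 3=UNORD, 4=NEQ,
         5=NLT, 6=NLE, 7=ORD); each result element is an all-ones/all-zeros mask. */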
11753 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11754 if (IEM_IS_MODRM_REG_MODE(bRm))
11755 {
11756 /*
11757 * XMM, XMM.
11758 */
11759 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
11760 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11761 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11762 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11763 IEM_MC_LOCAL(X86XMMREG, Dst);
11764 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11765 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11766 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11767 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11768 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11769 IEM_MC_PREPARE_SSE_USAGE();
11770 IEM_MC_REF_MXCSR(pfMxcsr);
11771 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11772 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11773 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpps_u128, pfMxcsr, pDst, pSrc, bImmArg);
11774 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11775 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11776 } IEM_MC_ELSE() {
11777 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11778 } IEM_MC_ENDIF();
11779
11780 IEM_MC_ADVANCE_RIP_AND_FINISH();
11781 IEM_MC_END();
11782 }
11783 else
11784 {
11785 /*
11786 * XMM, [mem128].
11787 */
11788 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
11789 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11790 IEM_MC_LOCAL(X86XMMREG, Dst);
11791 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11792 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11793 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11794 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11795
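          /* Note: the trailing 1 tells the effective address calculation that one
             immediate byte follows the ModR/M encoding (matters for RIP-relative
             addressing). */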
11796 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11797 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11798 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11799 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11800 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11801 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(Src.uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11802
11803 IEM_MC_PREPARE_SSE_USAGE();
11804 IEM_MC_REF_MXCSR(pfMxcsr);
11805 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11806 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpps_u128, pfMxcsr, pDst, pSrc, bImmArg);
11807 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11808 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11809 } IEM_MC_ELSE() {
11810 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11811 } IEM_MC_ENDIF();
11812
11813 IEM_MC_ADVANCE_RIP_AND_FINISH();
11814 IEM_MC_END();
11815 }
11816}
11817
11818
11819/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
11820FNIEMOP_DEF(iemOp_cmppd_Vpd_Wpd_Ib)
11821{
11822 IEMOP_MNEMONIC3(RMI, CMPPD, cmppd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11823
11824 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11825 if (IEM_IS_MODRM_REG_MODE(bRm))
11826 {
11827 /*
11828 * XMM, XMM.
11829 */
11830 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
11831 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11832 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11833 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11834 IEM_MC_LOCAL(X86XMMREG, Dst);
11835 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11836 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11837 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11838 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11839 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11840 IEM_MC_PREPARE_SSE_USAGE();
11841 IEM_MC_REF_MXCSR(pfMxcsr);
11842 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11843 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11844 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmppd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11845 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11846 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11847 } IEM_MC_ELSE() {
11848 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11849 } IEM_MC_ENDIF();
11850
11851 IEM_MC_ADVANCE_RIP_AND_FINISH();
11852 IEM_MC_END();
11853 }
11854 else
11855 {
11856 /*
11857 * XMM, [mem128].
11858 */
11859 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
11860 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11861 IEM_MC_LOCAL(X86XMMREG, Dst);
11862 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11863 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11864 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11865 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11866
11867 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11868 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11869 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11870 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11871 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11872 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(Src.uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11873
11874 IEM_MC_PREPARE_SSE_USAGE();
11875 IEM_MC_REF_MXCSR(pfMxcsr);
11876 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11877 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmppd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11878 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11879 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11880 } IEM_MC_ELSE() {
11881 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);
11882 } IEM_MC_ENDIF();
11883
11884 IEM_MC_ADVANCE_RIP_AND_FINISH();
11885 IEM_MC_END();
11886 }
11887}
11888
11889
11890/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
11891FNIEMOP_DEF(iemOp_cmpss_Vss_Wss_Ib)
11892{
11893 IEMOP_MNEMONIC3(RMI, CMPSS, cmpss, Vss, Wss, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11894
11895 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11896 if (IEM_IS_MODRM_REG_MODE(bRm))
11897 {
11898 /*
11899 * XMM32, XMM32.
11900 */
11901 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
11902 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11903 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11904 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11905 IEM_MC_LOCAL(X86XMMREG, Dst);
11906 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11907 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11908 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11909 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11910 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11911 IEM_MC_PREPARE_SSE_USAGE();
11912 IEM_MC_REF_MXCSR(pfMxcsr);
11913 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11914 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11915 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpss_u128, pfMxcsr, pDst, pSrc, bImmArg);
11916 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11917 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11918 } IEM_MC_ELSE() {
11919 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11920 } IEM_MC_ENDIF();
11921
11922 IEM_MC_ADVANCE_RIP_AND_FINISH();
11923 IEM_MC_END();
11924 }
11925 else
11926 {
11927 /*
11928 * XMM32, [mem32].
11929 */
11930 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
11931 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11932 IEM_MC_LOCAL(X86XMMREG, Dst);
11933 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11934 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11935 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11936 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11937
11938 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
11939 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11940 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11941 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
11942 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11943 IEM_MC_FETCH_MEM_XMM_U32(Src.uSrc2, 0 /*a_iDword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11944
11945 IEM_MC_PREPARE_SSE_USAGE();
11946 IEM_MC_REF_MXCSR(pfMxcsr);
11947 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11948 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpss_u128, pfMxcsr, pDst, pSrc, bImmArg);
11949 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11950 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11951 } IEM_MC_ELSE() {
11952 IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);
11953 } IEM_MC_ENDIF();
11954
11955 IEM_MC_ADVANCE_RIP_AND_FINISH();
11956 IEM_MC_END();
11957 }
11958}
11959
11960
11961/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
11962FNIEMOP_DEF(iemOp_cmpsd_Vsd_Wsd_Ib)
11963{
11964 IEMOP_MNEMONIC3(RMI, CMPSD, cmpsd, Vsd, Wsd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
11965
11966 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11967 if (IEM_IS_MODRM_REG_MODE(bRm))
11968 {
11969 /*
11970 * XMM64, XMM64.
11971 */
11972 IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
11973 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
11974 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
11975 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
11976 IEM_MC_LOCAL(X86XMMREG, Dst);
11977 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
11978 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
11979 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
11980 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
11981 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
11982 IEM_MC_PREPARE_SSE_USAGE();
11983 IEM_MC_REF_MXCSR(pfMxcsr);
11984 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
11985 IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
11986 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
11987 IEM_MC_IF_MXCSR_XCPT_PENDING() {
11988 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
11989 } IEM_MC_ELSE() {
11990 IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
11991 } IEM_MC_ENDIF();
11992
11993 IEM_MC_ADVANCE_RIP_AND_FINISH();
11994 IEM_MC_END();
11995 }
11996 else
11997 {
11998 /*
11999 * XMM64, [mem64].
12000 */
12001 IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
12002 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
12003 IEM_MC_LOCAL(X86XMMREG, Dst);
12004 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
12005 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
12006 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
12007 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12008
12009 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12010 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12011 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
12012 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12013 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12014 IEM_MC_FETCH_MEM_XMM_U64(Src.uSrc2, 0 /*a_iQword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12015
12016 IEM_MC_PREPARE_SSE_USAGE();
12017 IEM_MC_REF_MXCSR(pfMxcsr);
12018 IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
12019 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
12020 IEM_MC_IF_MXCSR_XCPT_PENDING() {
12021 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
12022 } IEM_MC_ELSE() {
12023 IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);
12024 } IEM_MC_ENDIF();
12025
12026 IEM_MC_ADVANCE_RIP_AND_FINISH();
12027 IEM_MC_END();
12028 }
12029}
12030
12031
12032/** Opcode 0x0f 0xc3. */
12033FNIEMOP_DEF(iemOp_movnti_My_Gy)
12034{
12035 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy");
12036
12037 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12038
12039 /* Only the register -> memory form makes sense, assuming #UD for the other form. */
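    /* Note! movnti is a non-temporal (streaming) store of a general register;
       no SSE register state is touched, so apart from the fSse2 CPUID check
       there is no SSE usage preparation below. */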
12040 if (IEM_IS_MODRM_MEM_MODE(bRm))
12041 {
12042 switch (pVCpu->iem.s.enmEffOpSize)
12043 {
12044 case IEMMODE_32BIT:
12045 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
12046 IEM_MC_LOCAL(uint32_t, u32Value);
12047 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12048
12049 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12050 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12051
12052 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
12053 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
12054 IEM_MC_ADVANCE_RIP_AND_FINISH();
12055 IEM_MC_END();
12056 break;
12057
12058 case IEMMODE_64BIT:
12059 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
12060 IEM_MC_LOCAL(uint64_t, u64Value);
12061 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12062
12063 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12064 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12065
12066 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
12067 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
12068 IEM_MC_ADVANCE_RIP_AND_FINISH();
12069 IEM_MC_END();
12070 break;
12071
12072 case IEMMODE_16BIT:
12073 /** @todo check this form. */
12074 IEMOP_RAISE_INVALID_OPCODE_RET();
12075
12076 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12077 }
12078 }
12079 else
12080 IEMOP_RAISE_INVALID_OPCODE_RET();
12081}
12082
12083
12084/* Opcode 0x66 0x0f 0xc3 - invalid */
12085/* Opcode 0xf3 0x0f 0xc3 - invalid */
12086/* Opcode 0xf2 0x0f 0xc3 - invalid */
12087
12088
12089/** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
12090FNIEMOP_DEF(iemOp_pinsrw_Pq_RyMw_Ib)
12091{
12092 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Pq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
12093 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12094 if (IEM_IS_MODRM_REG_MODE(bRm))
12095 {
12096 /*
12097 * Register, register.
12098 */
12099 IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12100 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12101 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
12102 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12103 IEM_MC_ARG(uint16_t, u16Src, 1);
12104 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12105 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12106 IEM_MC_PREPARE_FPU_USAGE();
12107 IEM_MC_FPU_TO_MMX_MODE();
12108 IEM_MC_REF_MREG_U64(pu64Dst, IEM_GET_MODRM_REG_8(bRm));
12109 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
12110 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u64, pu64Dst, u16Src, bImmArg);
12111 IEM_MC_MODIFIED_MREG_BY_REF(pu64Dst);
12112 IEM_MC_ADVANCE_RIP_AND_FINISH();
12113 IEM_MC_END();
12114 }
12115 else
12116 {
12117 /*
12118 * Register, memory.
12119 */
12120 IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
12121 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12122 IEM_MC_ARG(uint16_t, u16Src, 1);
12123 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12124
12125 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12126 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12127 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12128 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
12129 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12130 IEM_MC_PREPARE_FPU_USAGE();
12131 IEM_MC_FPU_TO_MMX_MODE();
12132
12133 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12134 IEM_MC_REF_MREG_U64(pu64Dst, IEM_GET_MODRM_REG_8(bRm));
12135 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u64, pu64Dst, u16Src, bImmArg);
12136 IEM_MC_MODIFIED_MREG_BY_REF(pu64Dst);
12137 IEM_MC_ADVANCE_RIP_AND_FINISH();
12138 IEM_MC_END();
12139 }
12140}
12141
12142
12143/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
12144FNIEMOP_DEF(iemOp_pinsrw_Vdq_RyMw_Ib)
12145{
12146 IEMOP_MNEMONIC3(RMI, PINSRW, pinsrw, Vq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12147 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12148 if (IEM_IS_MODRM_REG_MODE(bRm))
12149 {
12150 /*
12151 * Register, register.
12152 */
12153 IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12154 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12155 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12156 IEM_MC_ARG(PRTUINT128U, puDst, 0);
12157 IEM_MC_ARG(uint16_t, u16Src, 1);
12158 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12159 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12160 IEM_MC_PREPARE_SSE_USAGE();
12161 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
12162 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12163 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u128, puDst, u16Src, bImmArg);
12164 IEM_MC_ADVANCE_RIP_AND_FINISH();
12165 IEM_MC_END();
12166 }
12167 else
12168 {
12169 /*
12170 * Register, memory.
12171 */
12172 IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
12173 IEM_MC_ARG(PRTUINT128U, puDst, 0);
12174 IEM_MC_ARG(uint16_t, u16Src, 1);
12175 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12176
12177 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12178 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12179 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12180 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12181 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12182 IEM_MC_PREPARE_SSE_USAGE();
12183
12184 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12185 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12186 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pinsrw_u128, puDst, u16Src, bImmArg);
12187 IEM_MC_ADVANCE_RIP_AND_FINISH();
12188 IEM_MC_END();
12189 }
12190}
12191
12192
12193/* Opcode 0xf3 0x0f 0xc4 - invalid */
12194/* Opcode 0xf2 0x0f 0xc4 - invalid */
12195
12196
12197/** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
12198FNIEMOP_DEF(iemOp_pextrw_Gd_Nq_Ib)
12199{
12200 /*IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Nq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);*/ /** @todo */
12201 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12202 if (IEM_IS_MODRM_REG_MODE(bRm))
12203 {
12204 /*
12205 * Greg32, MMX, imm8.
12206 */
12207 IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
12208 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12209 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
12210 IEM_MC_LOCAL(uint16_t, u16Dst);
12211 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
12212 IEM_MC_ARG(uint64_t, u64Src, 1);
12213 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12214 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
12215 IEM_MC_PREPARE_FPU_USAGE();
12216 IEM_MC_FPU_TO_MMX_MODE();
12217 IEM_MC_FETCH_MREG_U64(u64Src, IEM_GET_MODRM_RM_8(bRm));
12218 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pextrw_u64, pu16Dst, u64Src, bImmArg);
12219 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
12220 IEM_MC_ADVANCE_RIP_AND_FINISH();
12221 IEM_MC_END();
12222 }
12223 /* No memory operand. */
12224 else
12225 IEMOP_RAISE_INVALID_OPCODE_RET();
12226}
12227
12228
12229/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
12230FNIEMOP_DEF(iemOp_pextrw_Gd_Udq_Ib)
12231{
12232 IEMOP_MNEMONIC3(RMI_REG, PEXTRW, pextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12233 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12234 if (IEM_IS_MODRM_REG_MODE(bRm))
12235 {
12236 /*
12237 * Greg32, XMM, imm8.
12238 */
12239 IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
12240 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12241 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12242 IEM_MC_LOCAL(uint16_t, u16Dst);
12243 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
12244 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
12245 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12246 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12247 IEM_MC_PREPARE_SSE_USAGE();
12248 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12249 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_pextrw_u128, pu16Dst, puSrc, bImmArg);
12250 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
12251 IEM_MC_ADVANCE_RIP_AND_FINISH();
12252 IEM_MC_END();
12253 }
12254 /* No memory operand. */
12255 else
12256 IEMOP_RAISE_INVALID_OPCODE_RET();
12257}
12258
12259
12260/* Opcode 0xf3 0x0f 0xc5 - invalid */
12261/* Opcode 0xf2 0x0f 0xc5 - invalid */
12262
12263
12264/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
12265FNIEMOP_DEF(iemOp_shufps_Vps_Wps_Ib)
12266{
12267 IEMOP_MNEMONIC3(RMI, SHUFPS, shufps, Vps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12268 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12269 if (IEM_IS_MODRM_REG_MODE(bRm))
12270 {
12271 /*
12272 * XMM, XMM, imm8.
12273 */
12274 IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12275 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12276 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
12277 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12278 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
12279 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12280 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12281 IEM_MC_PREPARE_SSE_USAGE();
12282 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12283 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12284 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);
12285 IEM_MC_ADVANCE_RIP_AND_FINISH();
12286 IEM_MC_END();
12287 }
12288 else
12289 {
12290 /*
12291 * XMM, [mem128], imm8.
12292 */
12293 IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
12294 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12295 IEM_MC_LOCAL(RTUINT128U, uSrc);
12296 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
12297 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12298
12299 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12300 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12301 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12302 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse);
12303 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12304 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12305
12306 IEM_MC_PREPARE_SSE_USAGE();
12307 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12308 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufps_u128, pDst, pSrc, bImmArg);
12309
12310 IEM_MC_ADVANCE_RIP_AND_FINISH();
12311 IEM_MC_END();
12312 }
12313}
12314
12315
12316/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
12317FNIEMOP_DEF(iemOp_shufpd_Vpd_Wpd_Ib)
12318{
12319 IEMOP_MNEMONIC3(RMI, SHUFPD, shufpd, Vpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12320 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12321 if (IEM_IS_MODRM_REG_MODE(bRm))
12322 {
12323 /*
12324 * XMM, XMM, imm8.
12325 */
12326 IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12327 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12328 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12329 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12330 IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
12331 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12332 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12333 IEM_MC_PREPARE_SSE_USAGE();
12334 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12335 IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
12336 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);
12337 IEM_MC_ADVANCE_RIP_AND_FINISH();
12338 IEM_MC_END();
12339 }
12340 else
12341 {
12342 /*
12343 * XMM, [mem128], imm8.
12344 */
12345 IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
12346 IEM_MC_ARG(PRTUINT128U, pDst, 0);
12347 IEM_MC_LOCAL(RTUINT128U, uSrc);
12348 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
12349 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12350
12351 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
12352 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12353 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
12354 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12355 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12356 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12357
12358 IEM_MC_PREPARE_SSE_USAGE();
12359 IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
12360 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_shufpd_u128, pDst, pSrc, bImmArg);
12361
12362 IEM_MC_ADVANCE_RIP_AND_FINISH();
12363 IEM_MC_END();
12364 }
12365}
12366
12367
12368/* Opcode 0xf3 0x0f 0xc6 - invalid */
12369/* Opcode 0xf2 0x0f 0xc6 - invalid */
12370
12371
12372/** Opcode 0x0f 0xc7 !11/1. */
12373FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
12374{
12375 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq");
12376
12377 IEM_MC_BEGIN(4, 5, IEM_MC_F_NOT_286_OR_OLDER, 0);
12378 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
12379 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
12380 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
12381 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
12382 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
12383 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
12384 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12385 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
12386
12387 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12388 IEMOP_HLP_DONE_DECODING_EX(fCmpXchg8b);
12389 IEM_MC_MEM_MAP_U64_RW(pu64MemDst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
12390
12391 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
12392 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
12393 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
12394
12395 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
12396 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
12397 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
12398
12399 IEM_MC_FETCH_EFLAGS(EFlags);
12400 if ( !(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)
12401 && (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
12402 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
12403 else
12404 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
12405
12406 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64MemDst, bUnmapInfo);
12407 IEM_MC_COMMIT_EFLAGS(EFlags);
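    /* On failure (ZF clear) cmpxchg8b returns the current memory value in
       EDX:EAX; the worker has left it in u64EaxEdx for us to copy back. */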
12408 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
12409 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
12410 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
12411 } IEM_MC_ENDIF();
12412 IEM_MC_ADVANCE_RIP_AND_FINISH();
12413
12414 IEM_MC_END();
12415}
12416
12417
12418/** Opcode REX.W 0x0f 0xc7 !11/1. */
12419FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm)
12420{
12421 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq");
12422 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fCmpXchg16b)
12423 {
12424 /*
12425 * This is hairy, very hairy macro fun. We're walking a fine line
12426 * here to make the code parsable by IEMAllInstPython.py and fit into
12427 * the patterns IEMAllThrdPython.py requires for the code morphing.
12428 */
12429#define BODY_CMPXCHG16B_HEAD \
12430 IEM_MC_BEGIN(4, 3, IEM_MC_F_64BIT, 0); \
12431 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0); \
12432 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1); \
12433 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2); \
12434 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3); \
12435 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx); \
12436 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx); \
12437 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12438 \
12439 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
12440 IEMOP_HLP_DONE_DECODING(); \
12441 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16); \
12442 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
12443 \
12444 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX); \
12445 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX); \
12446 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx); \
12447 \
12448 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX); \
12449 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX); \
12450 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx); \
12451 \
12452 IEM_MC_FETCH_EFLAGS(EFlags)
12453
12454#define BODY_CMPXCHG16B_TAIL \
12455 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW); \
12456 IEM_MC_COMMIT_EFLAGS(EFlags); \
12457 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
12458 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo); \
12459 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi); \
12460 } IEM_MC_ENDIF(); \
12461 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12462 IEM_MC_END()
12463
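/* Note! Each path below expands to HEAD + one worker call + TAIL so that every
   IEM_MC_BEGIN/IEM_MC_END block is complete in itself. The HEAD also enforces
   the 16-byte operand alignment cmpxchg16b requires, raising #GP(0). */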
12464#ifdef RT_ARCH_AMD64
12465 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fCmpXchg16b)
12466 {
12467 if ( !(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)
12468 && (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
12469 {
12470 BODY_CMPXCHG16B_HEAD;
12471 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12472 BODY_CMPXCHG16B_TAIL;
12473 }
12474 else
12475 {
12476 BODY_CMPXCHG16B_HEAD;
12477 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12478 BODY_CMPXCHG16B_TAIL;
12479 }
12480 }
12481 else
12482 { /* (see comments in #else case below) */
12483 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
12484 {
12485 BODY_CMPXCHG16B_HEAD;
12486 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12487 BODY_CMPXCHG16B_TAIL;
12488 }
12489 else
12490 {
12491 BODY_CMPXCHG16B_HEAD;
12492 IEM_MC_CALL_CIMPL_4(IEM_CIMPL_F_STATUS_FLAGS,
12493 iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12494 IEM_MC_END();
12495 }
12496 }
12497
12498#elif defined(RT_ARCH_ARM64)
12499 /** @todo may require fallback for unaligned accesses... */
12500 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
12501 {
12502 BODY_CMPXCHG16B_HEAD;
12503 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12504 BODY_CMPXCHG16B_TAIL;
12505 }
12506 else
12507 {
12508 BODY_CMPXCHG16B_HEAD;
12509 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12510 BODY_CMPXCHG16B_TAIL;
12511 }
12512
12513#else
12514 /* Note! The fallback for 32-bit systems and systems without CX16 is multiple
12515 accesses that are not all atomic, which works fine in a UNI CPU guest
12516 configuration (ignoring DMA). If guest SMP is active we have no choice
12517 but to use a rendezvous callback here. Sigh. */
12518 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1)
12519 {
12520 BODY_CMPXCHG16B_HEAD;
12521 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12522 BODY_CMPXCHG16B_TAIL;
12523 }
12524 else
12525 {
12526 BODY_CMPXCHG16B_HEAD;
12527 IEM_MC_CALL_CIMPL_4(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_cmpxchg16b_fallback_rendezvous,
12528 pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags);
12529 IEM_MC_END();
12530 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */
12531 }
12532#endif
12533
12534#undef BODY_CMPXCHG16B_HEAD
#undef BODY_CMPXCHG16B_TAIL
12535 }
12536 Log(("cmpxchg16b -> #UD\n"));
12537 IEMOP_RAISE_INVALID_OPCODE_RET();
12538}
12539
12540FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm)
12541{
12542 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
12543 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
12544 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
12545}
12546
12547
12548/** Opcode 0x0f 0xc7 11/6. */
12549FNIEMOP_DEF_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm)
12550{
12551 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdRand)
12552 IEMOP_RAISE_INVALID_OPCODE_RET();
12553
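    /* Note! rdrand (and rdseed below) reports success in CF (set = valid
       value) and clears the other arithmetic flags, hence the
       IEM_CIMPL_F_RFLAGS on the call below. */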
12554 if (IEM_IS_MODRM_REG_MODE(bRm))
12555 {
12556 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12557 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12558 IEM_MC_ARG_CONST(uint8_t, iReg, /*=*/IEM_GET_MODRM_RM(pVCpu, bRm), 0);
12559 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 1);
12560 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, iemCImpl_rdrand, iReg, enmEffOpSize);
12561 IEM_MC_END();
12562 }
12563 /* Register only. */
12564 else
12565 IEMOP_RAISE_INVALID_OPCODE_RET();
12566}
12567
12568/** Opcode 0x0f 0xc7 !11/6. */
12569#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12570FNIEMOP_DEF_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm)
12571{
12572 IEMOP_MNEMONIC(vmptrld, "vmptrld");
12573 IEMOP_HLP_IN_VMX_OPERATION("vmptrld", kVmxVDiag_Vmptrld);
12574 IEMOP_HLP_VMX_INSTR("vmptrld", kVmxVDiag_Vmptrld);
12575 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12576 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
12577 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12578 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
12579 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12580 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmptrld, iEffSeg, GCPtrEffSrc);
12581 IEM_MC_END();
12582}
12583#else
12584FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
12585#endif
12586
12587/** Opcode 0x66 0x0f 0xc7 !11/6. */
12588#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12589FNIEMOP_DEF_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm)
12590{
12591 IEMOP_MNEMONIC(vmclear, "vmclear");
12592 IEMOP_HLP_IN_VMX_OPERATION("vmclear", kVmxVDiag_Vmclear);
12593 IEMOP_HLP_VMX_INSTR("vmclear", kVmxVDiag_Vmclear);
12594 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12595 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
12596 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12597 IEMOP_HLP_DONE_DECODING();
12598 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12599 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmclear, iEffSeg, GCPtrEffDst);
12600 IEM_MC_END();
12601}
12602#else
12603FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
12604#endif
12605
12606/** Opcode 0xf3 0x0f 0xc7 !11/6. */
12607#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12608FNIEMOP_DEF_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm)
12609{
12610 IEMOP_MNEMONIC(vmxon, "vmxon");
12611 IEMOP_HLP_VMX_INSTR("vmxon", kVmxVDiag_Vmxon);
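    /* Note! No IEMOP_HLP_IN_VMX_OPERATION check here since vmxon is the
       instruction that enters VMX operation in the first place. */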
12612 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12613 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
12614 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12615 IEMOP_HLP_DONE_DECODING();
12616 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12617 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmxon, iEffSeg, GCPtrEffSrc);
12618 IEM_MC_END();
12619}
12620#else
12621FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
12622#endif
12623
12624/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
12625#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
12626FNIEMOP_DEF_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm)
12627{
12628 IEMOP_MNEMONIC(vmptrst, "vmptrst");
12629 IEMOP_HLP_IN_VMX_OPERATION("vmptrst", kVmxVDiag_Vmptrst);
12630 IEMOP_HLP_VMX_INSTR("vmptrst", kVmxVDiag_Vmptrst);
12631 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12632 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
12633 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12634 IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES();
12635 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
12636 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_vmptrst, iEffSeg, GCPtrEffDst);
12637 IEM_MC_END();
12638}
12639#else
12640FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
12641#endif
12642
12643/** Opcode 0x0f 0xc7 11/7. */
12644FNIEMOP_DEF_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm)
12645{
12646 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fRdSeed)
12647 IEMOP_RAISE_INVALID_OPCODE_RET();
12648
12649 if (IEM_IS_MODRM_REG_MODE(bRm))
12650 {
12651 /* register destination. */
12652 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
12653 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12654 IEM_MC_ARG_CONST(uint8_t, iReg, /*=*/IEM_GET_MODRM_RM(pVCpu, bRm), 0);
12655 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 1);
12656 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, iemCImpl_rdseed, iReg, enmEffOpSize);
12657 IEM_MC_END();
12658 }
12659 /* Register only. */
12660 else
12661 IEMOP_RAISE_INVALID_OPCODE_RET();
12662}
12663
12664/**
12665 * Group 9 jump table for register variant.
12666 */
12667IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] =
12668{ /* pfx: none, 066h, 0f3h, 0f2h */
12669 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12670 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM),
12671 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12672 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12673 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12674 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12675 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12676 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12677};
12678AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4);
12679
12680
12681/**
12682 * Group 9 jump table for memory variant.
12683 */
12684IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] =
12685{ /* pfx: none, 066h, 0f3h, 0f2h */
12686 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM),
12687 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */
12688 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM),
12689 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM),
12690 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
12691 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM),
12692 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM,
12693 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
12694};
12695AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4);
12696
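/* Note! Both tables are indexed by (modrm.reg * 4) + mandatory-prefix-index;
   e.g. 66 0F C7 /6 (vmclear) dispatches via g_apfnGroup9MemReg[6 * 4 + 1]. */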
12697
12698/** Opcode 0x0f 0xc7. */
12699FNIEMOP_DEF(iemOp_Grp9)
12700{
12701 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12702 if (IEM_IS_MODRM_REG_MODE(bRm))
12703 /* register, register */
12704 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12705 + pVCpu->iem.s.idxPrefix], bRm);
12706 /* memory, register */
12707 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
12708 + pVCpu->iem.s.idxPrefix], bRm);
12709}
12710
12711
12712/**
12713 * Common 'bswap register' helper.
12714 */
12715FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
12716{
12717 switch (pVCpu->iem.s.enmEffOpSize)
12718 {
12719 case IEMMODE_16BIT:
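        /* Note! bswap with a 16-bit operand is officially undefined; the u16
           worker implements whatever behaviour was settled on for this case. */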
12720 IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_486, 0);
12721 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12722 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12723 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
12724 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
12725 IEM_MC_ADVANCE_RIP_AND_FINISH();
12726 IEM_MC_END();
12727 break;
12728
12729 case IEMMODE_32BIT:
12730 IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_486, 0);
12731 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12732 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12733 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
12734 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
12735 IEM_MC_CLEAR_HIGH_GREG_U64(iReg);
12736 IEM_MC_ADVANCE_RIP_AND_FINISH();
12737 IEM_MC_END();
12738 break;
12739
12740 case IEMMODE_64BIT:
12741 IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT, 0);
12742 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12743 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12744 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
12745 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
12746 IEM_MC_ADVANCE_RIP_AND_FINISH();
12747 IEM_MC_END();
12748 break;
12749
12750 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12751 }
12752}
12753
12754
12755/** Opcode 0x0f 0xc8. */
12756FNIEMOP_DEF(iemOp_bswap_rAX_r8)
12757{
12758 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8");
12759 /* Note! The Intel manuals state that R8-R15 can be accessed by using a REX.X
12760 prefix, but REX.B appears to be the correct prefix. For a parallel
12761 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
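    /* E.g. 41 0F C8 decodes as bswap r8d and 49 0F C8 as bswap r8. */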
12762 IEMOP_HLP_MIN_486();
12763 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB);
12764}
12765
12766
12767/** Opcode 0x0f 0xc9. */
12768FNIEMOP_DEF(iemOp_bswap_rCX_r9)
12769{
12770 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9");
12771 IEMOP_HLP_MIN_486();
12772 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB);
12773}
12774
12775
12776/** Opcode 0x0f 0xca. */
12777FNIEMOP_DEF(iemOp_bswap_rDX_r10)
12778{
12779 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10");
12780 IEMOP_HLP_MIN_486();
12781 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB);
12782}
12783
12784
12785/** Opcode 0x0f 0xcb. */
12786FNIEMOP_DEF(iemOp_bswap_rBX_r11)
12787{
12788 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11");
12789 IEMOP_HLP_MIN_486();
12790 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB);
12791}
12792
12793
12794/** Opcode 0x0f 0xcc. */
12795FNIEMOP_DEF(iemOp_bswap_rSP_r12)
12796{
12797 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12");
12798 IEMOP_HLP_MIN_486();
12799 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB);
12800}
12801
12802
12803/** Opcode 0x0f 0xcd. */
12804FNIEMOP_DEF(iemOp_bswap_rBP_r13)
12805{
12806 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13");
12807 IEMOP_HLP_MIN_486();
12808 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB);
12809}
12810
12811
12812/** Opcode 0x0f 0xce. */
12813FNIEMOP_DEF(iemOp_bswap_rSI_r14)
12814{
12815 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14");
12816 IEMOP_HLP_MIN_486();
12817 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB);
12818}
12819
12820
12821/** Opcode 0x0f 0xcf. */
12822FNIEMOP_DEF(iemOp_bswap_rDI_r15)
12823{
12824 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15");
12825 IEMOP_HLP_MIN_486();
12826 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB);
12827}
12828
12829
12830/* Opcode 0x0f 0xd0 - invalid */
12831
12832
12833/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
12834FNIEMOP_DEF(iemOp_addsubpd_Vpd_Wpd)
12835{
12836 IEMOP_MNEMONIC2(RM, ADDSUBPD, addsubpd, Vpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12837 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubpd_u128);
12838}
12839
12840
12841/* Opcode 0xf3 0x0f 0xd0 - invalid */
12842
12843
12844/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
12845FNIEMOP_DEF(iemOp_addsubps_Vps_Wps)
12846{
12847 IEMOP_MNEMONIC2(RM, ADDSUBPS, addsubps, Vps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12848 return FNIEMOP_CALL_1(iemOpCommonSse3Fp_FullFull_To_Full, iemAImpl_addsubps_u128);
12849}
12850
12851
12852
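/* Note! The psrlX/psraX/psllX Pq,Qq and Vx,Wx forms shift each element by the
   unsigned count in the low quadword of the source; counts beyond the element
   width zero the result (psraX instead fills with the sign bit). */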
12853/** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
12854FNIEMOP_DEF(iemOp_psrlw_Pq_Qq)
12855{
12856 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
12857 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlw_u64);
12858}
12859
12860/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, Wx */
12861FNIEMOP_DEF(iemOp_psrlw_Vx_Wx)
12862{
12863 IEMOP_MNEMONIC2(RM, PSRLW, psrlw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12864 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlw_u128);
12865}
12866
12867/* Opcode 0xf3 0x0f 0xd1 - invalid */
12868/* Opcode 0xf2 0x0f 0xd1 - invalid */
12869
12870/** Opcode 0x0f 0xd2 - psrld Pq, Qq */
12871FNIEMOP_DEF(iemOp_psrld_Pq_Qq)
12872{
12873 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
12874 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrld_u64);
12875}
12876
12877
12878/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
12879FNIEMOP_DEF(iemOp_psrld_Vx_Wx)
12880{
12881 IEMOP_MNEMONIC2(RM, PSRLD, psrld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12882 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrld_u128);
12883}
12884
12885
12886/* Opcode 0xf3 0x0f 0xd2 - invalid */
12887/* Opcode 0xf2 0x0f 0xd2 - invalid */
12888
12889/** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
12890FNIEMOP_DEF(iemOp_psrlq_Pq_Qq)
12891{
12892 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
12893 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrlq_u64);
12894}
12895
12896
12897/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
12898FNIEMOP_DEF(iemOp_psrlq_Vx_Wx)
12899{
12900 IEMOP_MNEMONIC2(RM, PSRLQ, psrlq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
12901 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrlq_u128);
12902}
12903
12904
12905/* Opcode 0xf3 0x0f 0xd3 - invalid */
12906/* Opcode 0xf2 0x0f 0xd3 - invalid */
12907
12908
12909/** Opcode 0x0f 0xd4 - paddq Pq, Qq */
12910FNIEMOP_DEF(iemOp_paddq_Pq_Qq)
12911{
12912 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12913 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full_Sse2, iemAImpl_paddq_u64);
12914}
12915
12916
12917/** Opcode 0x66 0x0f 0xd4 - paddq Vx, Wx */
12918FNIEMOP_DEF(iemOp_paddq_Vx_Wx)
12919{
12920 IEMOP_MNEMONIC2(RM, PADDQ, paddq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12921 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddq_u128);
12922}
12923
12924
12925/* Opcode 0xf3 0x0f 0xd4 - invalid */
12926/* Opcode 0xf2 0x0f 0xd4 - invalid */
12927
12928/** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
12929FNIEMOP_DEF(iemOp_pmullw_Pq_Qq)
12930{
12931 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
12932 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmullw_u64);
12933}
12934
12935/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
12936FNIEMOP_DEF(iemOp_pmullw_Vx_Wx)
12937{
12938 IEMOP_MNEMONIC2(RM, PMULLW, pmullw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12939 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmullw_u128);
12940}
12941
12942
12943/* Opcode 0xf3 0x0f 0xd5 - invalid */
12944/* Opcode 0xf2 0x0f 0xd5 - invalid */
12945
12946/* Opcode 0x0f 0xd6 - invalid */
12947
12948/**
12949 * @opcode 0xd6
12950 * @oppfx 0x66
12951 * @opcpuid sse2
12952 * @opgroup og_sse2_pcksclr_datamove
12953 * @opxcpttype none
12954 * @optest op1=-1 op2=2 -> op1=2
12955 * @optest op1=0 op2=-42 -> op1=-42
12956 */
12957FNIEMOP_DEF(iemOp_movq_Wq_Vq)
12958{
12959 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
12960 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12961 if (IEM_IS_MODRM_REG_MODE(bRm))
12962 {
12963 /*
12964 * Register, register.
12965 */
12966 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
12967 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12968 IEM_MC_LOCAL(uint64_t, uSrc);
12969
12970 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12971 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
12972
12973 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
12974 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
12975
12976 IEM_MC_ADVANCE_RIP_AND_FINISH();
12977 IEM_MC_END();
12978 }
12979 else
12980 {
12981 /*
12982 * Memory, register.
12983 */
12984 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
12985 IEM_MC_LOCAL(uint64_t, uSrc);
12986 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12987
12988 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12989 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
12990 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
12991 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
12992
12993 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/);
12994 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
12995
12996 IEM_MC_ADVANCE_RIP_AND_FINISH();
12997 IEM_MC_END();
12998 }
12999}
13000
13001
13002/**
13003 * @opcode 0xd6
13004 * @opcodesub 11 mr/reg
13005 * @oppfx f3
13006 * @opcpuid sse2
13007 * @opgroup og_sse2_simdint_datamove
13008 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
13009 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
13010 */
13011FNIEMOP_DEF(iemOp_movq2dq_Vdq_Nq)
13012{
13013 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13014 if (IEM_IS_MODRM_REG_MODE(bRm))
13015 {
13016 /*
13017 * Register, register.
13018 */
13019 IEMOP_MNEMONIC2(RM_REG, MOVQ2DQ, movq2dq, VqZx_WO, Nq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13020 IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
13021 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13022 IEM_MC_LOCAL(uint64_t, uSrc);
13023
13024 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13025 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13026 IEM_MC_FPU_TO_MMX_MODE();
13027
13028 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_RM_8(bRm));
13029 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
13030
13031 IEM_MC_ADVANCE_RIP_AND_FINISH();
13032 IEM_MC_END();
13033 }
13034
13035 /**
13036 * @opdone
13037 * @opmnemonic udf30fd6mem
13038 * @opcode 0xd6
13039 * @opcodesub !11 mr/reg
13040 * @oppfx f3
13041 * @opunused intel-modrm
13042 * @opcpuid sse
13043 * @optest ->
13044 */
13045 else
13046 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
13047}
13048
13049
13050/**
13051 * @opcode 0xd6
13052 * @opcodesub 11 mr/reg
13053 * @oppfx f2
13054 * @opcpuid sse2
13055 * @opgroup og_sse2_simdint_datamove
13056 * @optest op1=1 op2=2 -> op1=2 ftw=0xff
13057 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
13058 * @optest op1=0 op2=0x1123456789abcdef -> op1=0x1123456789abcdef ftw=0xff
13059 * @optest op1=0 op2=0xfedcba9876543210 -> op1=0xfedcba9876543210 ftw=0xff
13060 * @optest op1=-42 op2=0xfedcba9876543210
13061 * -> op1=0xfedcba9876543210 ftw=0xff
13062 */
13063FNIEMOP_DEF(iemOp_movdq2q_Pq_Uq)
13064{
13065 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13066 if (IEM_IS_MODRM_REG_MODE(bRm))
13067 {
13068 /*
13069 * Register, register.
13070 */
13071 IEMOP_MNEMONIC2(RM_REG, MOVDQ2Q, movdq2q, Pq_WO, Uq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13072 IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
13073 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13074 IEM_MC_LOCAL(uint64_t, uSrc);
13075
13076 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13077 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13078 IEM_MC_FPU_TO_MMX_MODE();
13079
13080 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
13081 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), uSrc);
13082
13083 IEM_MC_ADVANCE_RIP_AND_FINISH();
13084 IEM_MC_END();
13085 }
13086
13087 /**
13088 * @opdone
13089 * @opmnemonic udf20fd6mem
13090 * @opcode 0xd6
13091 * @opcodesub !11 mr/reg
13092 * @oppfx f2
13093 * @opunused intel-modrm
13094 * @opcpuid sse
13095 * @optest ->
13096 */
13097 else
13098 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedDecode, bRm);
13099}
13100
13101
13102/** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */
13103FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq)
13104{
13105 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13106 /* Docs say register only. */
13107 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
13108 {
13109 /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
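        /* pmovmskb gathers the top bit of each of the 8 source bytes into bits 7:0 of the destination GREG. */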
13110 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Nq, DISOPTYPE_X86_MMX | DISOPTYPE_HARMLESS, 0);
13111 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
13112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(fSse, fAmdMmxExts);
13113 IEM_MC_ARG(uint64_t *, puDst, 0);
13114 IEM_MC_ARG(uint64_t const *, puSrc, 1);
13115 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
13116 IEM_MC_PREPARE_FPU_USAGE();
13117 IEM_MC_FPU_TO_MMX_MODE();
13118
13119 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
13120 IEM_MC_REF_MREG_U64_CONST(puSrc, IEM_GET_MODRM_RM_8(bRm));
13121 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u64, puDst, puSrc);
13122
13123 IEM_MC_ADVANCE_RIP_AND_FINISH();
13124 IEM_MC_END();
13125 }
13126 else
13127 IEMOP_RAISE_INVALID_OPCODE_RET();
13128}
13129
13130
13131/** Opcode 0x66 0x0f 0xd7 - */
13132FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
13133{
13134 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13135 /* Docs say register only. */
13136 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
13137 {
13138 /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG. */
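        /* Same as the MMX form, but 16 source bytes, producing a mask in bits 15:0 of the destination GREG. */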
13139 IEMOP_MNEMONIC2(RM_REG, PMOVMSKB, pmovmskb, Gd, Ux, DISOPTYPE_X86_SSE | DISOPTYPE_HARMLESS, 0);
13140 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
13141 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13142 IEM_MC_ARG(uint64_t *, puDst, 0);
13143 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
13144 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13145 IEM_MC_PREPARE_SSE_USAGE();
13146 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
13147 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
13148 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
13149 IEM_MC_ADVANCE_RIP_AND_FINISH();
13150 IEM_MC_END();
13151 }
13152 else
13153 IEMOP_RAISE_INVALID_OPCODE_RET();
13154}
13155
13156
13157/* Opcode 0xf3 0x0f 0xd7 - invalid */
13158/* Opcode 0xf2 0x0f 0xd7 - invalid */
13159
13160
13161/** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
13162FNIEMOP_DEF(iemOp_psubusb_Pq_Qq)
13163{
13164 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13165 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusb_u64);
13166}
13167
13168
13169/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, Wx */
13170FNIEMOP_DEF(iemOp_psubusb_Vx_Wx)
13171{
13172 IEMOP_MNEMONIC2(RM, PSUBUSB, psubusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13173 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusb_u128);
13174}
13175
13176
13177/* Opcode 0xf3 0x0f 0xd8 - invalid */
13178/* Opcode 0xf2 0x0f 0xd8 - invalid */
13179
13180/** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
13181FNIEMOP_DEF(iemOp_psubusw_Pq_Qq)
13182{
13183 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13184 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubusw_u64);
13185}
13186
13187
13188/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
13189FNIEMOP_DEF(iemOp_psubusw_Vx_Wx)
13190{
13191 IEMOP_MNEMONIC2(RM, PSUBUSW, psubusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13192 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubusw_u128);
13193}
13194
13195
13196/* Opcode 0xf3 0x0f 0xd9 - invalid */
13197/* Opcode 0xf2 0x0f 0xd9 - invalid */
13198
13199/** Opcode 0x0f 0xda - pminub Pq, Qq */
13200FNIEMOP_DEF(iemOp_pminub_Pq_Qq)
13201{
13202 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13203 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminub_u64);
13204}
13205
13206
13207/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
13208FNIEMOP_DEF(iemOp_pminub_Vx_Wx)
13209{
13210 IEMOP_MNEMONIC2(RM, PMINUB, pminub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13211 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminub_u128);
13212}
13213
13214/* Opcode 0xf3 0x0f 0xda - invalid */
13215/* Opcode 0xf2 0x0f 0xda - invalid */
13216
13217/** Opcode 0x0f 0xdb - pand Pq, Qq */
13218FNIEMOP_DEF(iemOp_pand_Pq_Qq)
13219{
13220 IEMOP_MNEMONIC2(RM, PAND, pand, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13221 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pand_u64);
13222}
13223
13224
13225/** Opcode 0x66 0x0f 0xdb - pand Vx, Wx */
13226FNIEMOP_DEF(iemOp_pand_Vx_Wx)
13227{
13228 IEMOP_MNEMONIC2(RM, PAND, pand, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13229 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pand_u128);
13230}
13231
13232
13233/* Opcode 0xf3 0x0f 0xdb - invalid */
13234/* Opcode 0xf2 0x0f 0xdb - invalid */
13235
13236/** Opcode 0x0f 0xdc - paddusb Pq, Qq */
13237FNIEMOP_DEF(iemOp_paddusb_Pq_Qq)
13238{
13239 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13240 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusb_u64);
13241}
13242
13243
13244/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
13245FNIEMOP_DEF(iemOp_paddusb_Vx_Wx)
13246{
13247 IEMOP_MNEMONIC2(RM, PADDUSB, paddusb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13248 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusb_u128);
13249}
13250
13251
13252/* Opcode 0xf3 0x0f 0xdc - invalid */
13253/* Opcode 0xf2 0x0f 0xdc - invalid */
13254
13255/** Opcode 0x0f 0xdd - paddusw Pq, Qq */
13256FNIEMOP_DEF(iemOp_paddusw_Pq_Qq)
13257{
13258 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13259 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddusw_u64);
13260}
13261
13262
13263/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
13264FNIEMOP_DEF(iemOp_paddusw_Vx_Wx)
13265{
13266 IEMOP_MNEMONIC2(RM, PADDUSW, paddusw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13267 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddusw_u128);
13268}
13269
13270
13271/* Opcode 0xf3 0x0f 0xdd - invalid */
13272/* Opcode 0xf2 0x0f 0xdd - invalid */
13273
13274/** Opcode 0x0f 0xde - pmaxub Pq, Qq */
13275FNIEMOP_DEF(iemOp_pmaxub_Pq_Qq)
13276{
13277 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13278 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxub_u64);
13279}
13280
13281
13282/** Opcode 0x66 0x0f 0xde - pmaxub Vx, Wx */
13283FNIEMOP_DEF(iemOp_pmaxub_Vx_Wx)
13284{
13285 IEMOP_MNEMONIC2(RM, PMAXUB, pmaxub, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13286 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxub_u128);
13287}
13288
13289/* Opcode 0xf3 0x0f 0xde - invalid */
13290/* Opcode 0xf2 0x0f 0xde - invalid */
13291
13292
13293/** Opcode 0x0f 0xdf - pandn Pq, Qq */
13294FNIEMOP_DEF(iemOp_pandn_Pq_Qq)
13295{
13296 IEMOP_MNEMONIC2(RM, PANDN, pandn, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13297 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pandn_u64);
13298}
13299
13300
13301/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
13302FNIEMOP_DEF(iemOp_pandn_Vx_Wx)
13303{
13304 IEMOP_MNEMONIC2(RM, PANDN, pandn, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13305 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pandn_u128);
13306}
13307
13308
13309/* Opcode 0xf3 0x0f 0xdf - invalid */
13310/* Opcode 0xf2 0x0f 0xdf - invalid */
13311
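/* Note! pavgb/pavgw compute the rounded unsigned average (a + b + 1) >> 1 for
   each element. */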
13312/** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
13313FNIEMOP_DEF(iemOp_pavgb_Pq_Qq)
13314{
13315 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13316 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgb_u64);
13317}
13318
13319
13320/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
13321FNIEMOP_DEF(iemOp_pavgb_Vx_Wx)
13322{
13323 IEMOP_MNEMONIC2(RM, PAVGB, pavgb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13324 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgb_u128);
13325}
13326
13327
13328/* Opcode 0xf3 0x0f 0xe0 - invalid */
13329/* Opcode 0xf2 0x0f 0xe0 - invalid */
13330
13331/** Opcode 0x0f 0xe1 - psraw Pq, Qq */
13332FNIEMOP_DEF(iemOp_psraw_Pq_Qq)
13333{
13334 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13335 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psraw_u64);
13336}
13337
13338
13339/** Opcode 0x66 0x0f 0xe1 - psraw Vx, Wx */
13340FNIEMOP_DEF(iemOp_psraw_Vx_Wx)
13341{
13342 IEMOP_MNEMONIC2(RM, PSRAW, psraw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13343 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psraw_u128);
13344}
13345
13346
13347/* Opcode 0xf3 0x0f 0xe1 - invalid */
13348/* Opcode 0xf2 0x0f 0xe1 - invalid */
13349
13350/** Opcode 0x0f 0xe2 - psrad Pq, Qq */
13351FNIEMOP_DEF(iemOp_psrad_Pq_Qq)
13352{
13353 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13354 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psrad_u64);
13355}
13356
13357
13358/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
13359FNIEMOP_DEF(iemOp_psrad_Vx_Wx)
13360{
13361 IEMOP_MNEMONIC2(RM, PSRAD, psrad, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13362 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psrad_u128);
13363}
13364
13365
13366/* Opcode 0xf3 0x0f 0xe2 - invalid */
13367/* Opcode 0xf2 0x0f 0xe2 - invalid */
13368
13369/** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
13370FNIEMOP_DEF(iemOp_pavgw_Pq_Qq)
13371{
13372 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13373 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pavgw_u64);
13374}
13375
13376
13377/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
13378FNIEMOP_DEF(iemOp_pavgw_Vx_Wx)
13379{
13380 IEMOP_MNEMONIC2(RM, PAVGW, pavgw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13381 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pavgw_u128);
13382}
13383
13384
13385/* Opcode 0xf3 0x0f 0xe3 - invalid */
13386/* Opcode 0xf2 0x0f 0xe3 - invalid */
13387
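/* Note! pmulhuw/pmulhw store the high 16 bits of the 32-bit per-element
   product (unsigned resp. signed); pmullw above stores the low 16 bits. */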
13388/** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
13389FNIEMOP_DEF(iemOp_pmulhuw_Pq_Qq)
13390{
13391 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13392 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_pmulhuw_u64);
13393}
13394
13395
13396/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, Wx */
13397FNIEMOP_DEF(iemOp_pmulhuw_Vx_Wx)
13398{
13399 IEMOP_MNEMONIC2(RM, PMULHUW, pmulhuw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13400 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pmulhuw_u128);
13401}
13402
13403
13404/* Opcode 0xf3 0x0f 0xe4 - invalid */
13405/* Opcode 0xf2 0x0f 0xe4 - invalid */
13406
13407/** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
13408FNIEMOP_DEF(iemOp_pmulhw_Pq_Qq)
13409{
13410 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13411 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmulhw_u64);
13412}
13413
13414
13415/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
13416FNIEMOP_DEF(iemOp_pmulhw_Vx_Wx)
13417{
13418 IEMOP_MNEMONIC2(RM, PMULHW, pmulhw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13419 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmulhw_u128);
13420}
13421
13422
13423/* Opcode 0xf3 0x0f 0xe5 - invalid */
13424/* Opcode 0xf2 0x0f 0xe5 - invalid */
13425/* Opcode 0x0f 0xe6 - invalid */
13426
13427
13428/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
13429FNIEMOP_DEF(iemOp_cvttpd2dq_Vx_Wpd)
13430{
13431 IEMOP_MNEMONIC2(RM, CVTTPD2DQ, cvttpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13432 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvttpd2dq_u128);
13433}
13434
13435
13436/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
13437FNIEMOP_DEF(iemOp_cvtdq2pd_Vx_Wpd)
13438{
13439 IEMOP_MNEMONIC2(RM, CVTDQ2PD, cvtdq2pd, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13440 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtdq2pd_u128);
13441}
13442
13443
13444/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
13445FNIEMOP_DEF(iemOp_cvtpd2dq_Vx_Wpd)
13446{
13447 IEMOP_MNEMONIC2(RM, CVTPD2DQ, cvtpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13448 return FNIEMOP_CALL_1(iemOpCommonSse2Fp_FullFull_To_Full, iemAImpl_cvtpd2dq_u128);
13449}
13450
13451
13452/**
13453 * @opcode 0xe7
13454 * @opcodesub !11 mr/reg
13455 * @oppfx none
13456 * @opcpuid sse
13457 * @opgroup og_sse1_cachect
13458 * @opxcpttype none
13459 * @optest op1=-1 op2=2 -> op1=2 ftw=0xff
13460 * @optest op1=0 op2=-42 -> op1=-42 ftw=0xff
13461 */
13462FNIEMOP_DEF(iemOp_movntq_Mq_Pq)
13463{
13464 IEMOP_MNEMONIC2(MR_MEM, MOVNTQ, movntq, Mq_WO, Pq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
13465 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13466 if (IEM_IS_MODRM_MEM_MODE(bRm))
13467 {
13468 /* Register, memory. */
13469 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
13470 IEM_MC_LOCAL(uint64_t, uSrc);
13471 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13472
13473 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13474 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fMmx);
13475 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
13476 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
13477 IEM_MC_FPU_TO_MMX_MODE();
13478
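 /* Note: the non-temporal hint has no architectural effect; IEM simply
    performs a plain store below instead of modelling the cache bypass. */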
13479 IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_REG_8(bRm));
13480 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13481
13482 IEM_MC_ADVANCE_RIP_AND_FINISH();
13483 IEM_MC_END();
13484 }
13485 /**
13486 * @opdone
13487 * @opmnemonic ud0fe7reg
13488 * @opcode 0xe7
13489 * @opcodesub 11 mr/reg
13490 * @oppfx none
13491 * @opunused immediate
13492 * @opcpuid sse
13493 * @optest ->
13494 */
13495 else
13496 IEMOP_RAISE_INVALID_OPCODE_RET();
13497}
13498
13499/**
13500 * @opcode 0xe7
13501 * @opcodesub !11 mr/reg
13502 * @oppfx 0x66
13503 * @opcpuid sse2
13504 * @opgroup og_sse2_cachect
13505 * @opxcpttype 1
13506 * @optest op1=-1 op2=2 -> op1=2
13507 * @optest op1=0 op2=-42 -> op1=-42
13508 */
13509FNIEMOP_DEF(iemOp_movntdq_Mdq_Vdq)
13510{
13511 IEMOP_MNEMONIC2(MR_MEM, MOVNTDQ, movntdq, Mdq_WO, Vdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13512 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13513 if (IEM_IS_MODRM_MEM_MODE(bRm))
13514 {
13515 /* Register, memory. */
13516 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
13517 IEM_MC_LOCAL(RTUINT128U, uSrc);
13518 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13519
13520 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13521 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
13522 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13523 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
13524
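 /* Note: the ALIGN_SSE store below enforces movntdq's 16-byte operand
    alignment, raising \#GP(0) for a misaligned effective address. */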
13525 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
13526 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
13527
13528 IEM_MC_ADVANCE_RIP_AND_FINISH();
13529 IEM_MC_END();
13530 }
13531
13532 /**
13533 * @opdone
13534 * @opmnemonic ud660fe7reg
13535 * @opcode 0xe7
13536 * @opcodesub 11 mr/reg
13537 * @oppfx 0x66
13538 * @opunused immediate
13539 * @opcpuid sse2
13540 * @optest ->
13541 */
13542 else
13543 IEMOP_RAISE_INVALID_OPCODE_RET();
13544}
13545
13546/* Opcode 0xf3 0x0f 0xe7 - invalid */
13547/* Opcode 0xf2 0x0f 0xe7 - invalid */
13548
13549
13550/** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
13551FNIEMOP_DEF(iemOp_psubsb_Pq_Qq)
13552{
13553 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13554 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsb_u64);
13555}
13556
13557
13558/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, Wx */
13559FNIEMOP_DEF(iemOp_psubsb_Vx_Wx)
13560{
13561 IEMOP_MNEMONIC2(RM, PSUBSB, psubsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13562 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsb_u128);
13563}
13564
13565
13566/* Opcode 0xf3 0x0f 0xe8 - invalid */
13567/* Opcode 0xf2 0x0f 0xe8 - invalid */
13568
13569/** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
13570FNIEMOP_DEF(iemOp_psubsw_Pq_Qq)
13571{
13572 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13573 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubsw_u64);
13574}
13575
13576
13577/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
13578FNIEMOP_DEF(iemOp_psubsw_Vx_Wx)
13579{
13580 IEMOP_MNEMONIC2(RM, PSUBSW, psubsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13581 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubsw_u128);
13582}
13583
13584
13585/* Opcode 0xf3 0x0f 0xe9 - invalid */
13586/* Opcode 0xf2 0x0f 0xe9 - invalid */
13587
13588
13589/** Opcode 0x0f 0xea - pminsw Pq, Qq */
13590FNIEMOP_DEF(iemOp_pminsw_Pq_Qq)
13591{
13592 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13593 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pminsw_u64);
13594}
13595
13596
13597/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
13598FNIEMOP_DEF(iemOp_pminsw_Vx_Wx)
13599{
13600 IEMOP_MNEMONIC2(RM, PMINSW, pminsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13601 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pminsw_u128);
13602}
13603
13604
13605/* Opcode 0xf3 0x0f 0xea - invalid */
13606/* Opcode 0xf2 0x0f 0xea - invalid */
13607
13608
13609/** Opcode 0x0f 0xeb - por Pq, Qq */
13610FNIEMOP_DEF(iemOp_por_Pq_Qq)
13611{
13612 IEMOP_MNEMONIC2(RM, POR, por, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13613 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_por_u64);
13614}
13615
13616
13617/** Opcode 0x66 0x0f 0xeb - por Vx, Wx */
13618FNIEMOP_DEF(iemOp_por_Vx_Wx)
13619{
13620 IEMOP_MNEMONIC2(RM, POR, por, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13621 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_por_u128);
13622}
13623
13624
13625/* Opcode 0xf3 0x0f 0xeb - invalid */
13626/* Opcode 0xf2 0x0f 0xeb - invalid */
13627
13628/** Opcode 0x0f 0xec - paddsb Pq, Qq */
13629FNIEMOP_DEF(iemOp_paddsb_Pq_Qq)
13630{
13631 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13632 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsb_u64);
13633}
13634
13635
13636/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
13637FNIEMOP_DEF(iemOp_paddsb_Vx_Wx)
13638{
13639 IEMOP_MNEMONIC2(RM, PADDSB, paddsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13640 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsb_u128);
13641}
13642
13643
13644/* Opcode 0xf3 0x0f 0xec - invalid */
13645/* Opcode 0xf2 0x0f 0xec - invalid */
13646
13647/** Opcode 0x0f 0xed - paddsw Pq, Qq */
13648FNIEMOP_DEF(iemOp_paddsw_Pq_Qq)
13649{
13650 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13651 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddsw_u64);
13652}
13653
13654
13655/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
13656FNIEMOP_DEF(iemOp_paddsw_Vx_Wx)
13657{
13658 IEMOP_MNEMONIC2(RM, PADDSW, paddsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13659 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddsw_u128);
13660}
13661
13662
13663/* Opcode 0xf3 0x0f 0xed - invalid */
13664/* Opcode 0xf2 0x0f 0xed - invalid */
13665
13666
13667/** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
13668FNIEMOP_DEF(iemOp_pmaxsw_Pq_Qq)
13669{
13670 IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13671 return FNIEMOP_CALL_1(iemOpCommonMmxSse_FullFull_To_Full, iemAImpl_pmaxsw_u64);
13672}
13673
13674
13675/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, Wx */
13676FNIEMOP_DEF(iemOp_pmaxsw_Vx_Wx)
13677{
13678 IEMOP_MNEMONIC2(RM, PMAXSW, pmaxsw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13679 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaxsw_u128);
13680}
13681
13682
13683/* Opcode 0xf3 0x0f 0xee - invalid */
13684/* Opcode 0xf2 0x0f 0xee - invalid */
13685
13686
13687/** Opcode 0x0f 0xef - pxor Pq, Qq */
13688FNIEMOP_DEF(iemOp_pxor_Pq_Qq)
13689{
13690 IEMOP_MNEMONIC2(RM, PXOR, pxor, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13691 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pxor_u64);
13692}
13693
13694
13695/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
13696FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
13697{
13698 IEMOP_MNEMONIC2(RM, PXOR, pxor, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13699 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pxor_u128);
13700}
13701
13702
13703/* Opcode 0xf3 0x0f 0xef - invalid */
13704/* Opcode 0xf2 0x0f 0xef - invalid */
13705
13706/* Opcode 0x0f 0xf0 - invalid */
13707/* Opcode 0x66 0x0f 0xf0 - invalid */
13708
13709
13710/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
13711FNIEMOP_DEF(iemOp_lddqu_Vx_Mx)
13712{
13713 IEMOP_MNEMONIC2(RM_MEM, LDDQU, lddqu, Vdq_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13714 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13715 if (IEM_IS_MODRM_REG_MODE(bRm))
13716 {
13717 /*
13718 * Register, register - (not implemented, assuming it raises \#UD).
13719 */
13720 IEMOP_RAISE_INVALID_OPCODE_RET();
13721 }
13722 else
13723 {
13724 /*
13725 * Register, memory.
13726 */
13727 IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
13728 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
13729 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13730
13731 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13732 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse3);
13733 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
13734 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
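 /* Note: lddqu is defined to tolerate unaligned operands, hence the
    plain (unchecked) U128 fetch rather than the ALIGN_SSE variant. */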
13735 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13736 IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
13737
13738 IEM_MC_ADVANCE_RIP_AND_FINISH();
13739 IEM_MC_END();
13740 }
13741}
13742
13743
13744/** Opcode 0x0f 0xf1 - psllw Pq, Qq */
13745FNIEMOP_DEF(iemOp_psllw_Pq_Qq)
13746{
13747 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13748 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllw_u64);
13749}
13750
13751
13752/** Opcode 0x66 0x0f 0xf1 - psllw Vx, Wx */
13753FNIEMOP_DEF(iemOp_psllw_Vx_Wx)
13754{
13755 IEMOP_MNEMONIC2(RM, PSLLW, psllw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13756 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllw_u128);
13757}
13758
13759
13760/* Opcode 0xf2 0x0f 0xf1 - invalid */
13761
13762/** Opcode 0x0f 0xf2 - pslld Pq, Qq */
13763FNIEMOP_DEF(iemOp_pslld_Pq_Qq)
13764{
13765 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13766 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_pslld_u64);
13767}
13768
13769
13770/** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */
13771FNIEMOP_DEF(iemOp_pslld_Vx_Wx)
13772{
13773 IEMOP_MNEMONIC2(RM, PSLLD, pslld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13774 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_pslld_u128);
13775}
13776
13777
13778/* Opcode 0xf2 0x0f 0xf2 - invalid */
13779
13780/** Opcode 0x0f 0xf3 - psllq Pq, Qq */
13781FNIEMOP_DEF(iemOp_psllq_Pq_Qq)
13782{
13783 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13784 return FNIEMOP_CALL_1(iemOpCommonMmxOpt_FullFull_To_Full, iemAImpl_psllq_u64);
13785}
13786
13787
13788/** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */
13789FNIEMOP_DEF(iemOp_psllq_Vx_Wx)
13790{
13791 IEMOP_MNEMONIC2(RM, PSLLQ, psllq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13792 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psllq_u128);
13793}
13794
13795/* Opcode 0xf2 0x0f 0xf3 - invalid */
13796
13797/** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */
13798FNIEMOP_DEF(iemOp_pmuludq_Pq_Qq)
13799{
13800 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13801 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmuludq_u64);
13802}
13803
13804
13805/** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, Wx */
13806FNIEMOP_DEF(iemOp_pmuludq_Vx_Wx)
13807{
13808 IEMOP_MNEMONIC2(RM, PMULUDQ, pmuludq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13809 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmuludq_u128);
13810}
13811
13812
13813/* Opcode 0xf2 0x0f 0xf4 - invalid */
13814
13815/** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */
13816FNIEMOP_DEF(iemOp_pmaddwd_Pq_Qq)
13817{
13818 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, 0);
13819 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_pmaddwd_u64);
13820}
13821
13822
13823/** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */
13824FNIEMOP_DEF(iemOp_pmaddwd_Vx_Wx)
13825{
13826 IEMOP_MNEMONIC2(RM, PMADDWD, pmaddwd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
13827 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_pmaddwd_u128);
13828}
13829
13830/* Opcode 0xf2 0x0f 0xf5 - invalid */
13831
13832/** Opcode 0x0f 0xf6 - psadbw Pq, Qq */
13833FNIEMOP_DEF(iemOp_psadbw_Pq_Qq)
13834{
13835 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13836 return FNIEMOP_CALL_1(iemOpCommonMmxSseOpt_FullFull_To_Full, iemAImpl_psadbw_u64);
13837}
13838
13839
13840/** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */
13841FNIEMOP_DEF(iemOp_psadbw_Vx_Wx)
13842{
13843 IEMOP_MNEMONIC2(RM, PSADBW, psadbw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13844 return FNIEMOP_CALL_1(iemOpCommonSse2Opt_FullFull_To_Full, iemAImpl_psadbw_u128);
13845}
13846
13847
13848/* Opcode 0xf2 0x0f 0xf6 - invalid */
13849
13850/** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */
13851FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq);
13852/** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */
13853FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq);
13854/* Opcode 0xf2 0x0f 0xf7 - invalid */
13855
13856
13857/** Opcode 0x0f 0xf8 - psubb Pq, Qq */
13858FNIEMOP_DEF(iemOp_psubb_Pq_Qq)
13859{
13860 IEMOP_MNEMONIC2(RM, PSUBB, psubb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13861 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubb_u64);
13862}
13863
13864
13865/** Opcode 0x66 0x0f 0xf8 - psubb Vx, Wx */
13866FNIEMOP_DEF(iemOp_psubb_Vx_Wx)
13867{
13868 IEMOP_MNEMONIC2(RM, PSUBB, psubb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13869 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubb_u128);
13870}
13871
13872
13873/* Opcode 0xf2 0x0f 0xf8 - invalid */
13874
13875
13876/** Opcode 0x0f 0xf9 - psubw Pq, Qq */
13877FNIEMOP_DEF(iemOp_psubw_Pq_Qq)
13878{
13879 IEMOP_MNEMONIC2(RM, PSUBW, psubw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13880 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubw_u64);
13881}
13882
13883
13884/** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */
13885FNIEMOP_DEF(iemOp_psubw_Vx_Wx)
13886{
13887 IEMOP_MNEMONIC2(RM, PSUBW, psubw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13888 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubw_u128);
13889}
13890
13891
13892/* Opcode 0xf2 0x0f 0xf9 - invalid */
13893
13894
13895/** Opcode 0x0f 0xfa - psubd Pq, Qq */
13896FNIEMOP_DEF(iemOp_psubd_Pq_Qq)
13897{
13898 IEMOP_MNEMONIC2(RM, PSUBD, psubd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13899 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_psubd_u64);
13900}
13901
13902
13903/** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */
13904FNIEMOP_DEF(iemOp_psubd_Vx_Wx)
13905{
13906 IEMOP_MNEMONIC2(RM, PSUBD, psubd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13907 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubd_u128);
13908}
13909
13910
13911/* Opcode 0xf2 0x0f 0xfa - invalid */
13912
13913
13914/** Opcode 0x0f 0xfb - psubq Pq, Qq */
13915FNIEMOP_DEF(iemOp_psubq_Pq_Qq)
13916{
13917 IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13918 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full_Sse2, iemAImpl_psubq_u64);
13919}
13920
13921
13922/** Opcode 0x66 0x0f 0xfb - psubq Vx, Wx */
13923FNIEMOP_DEF(iemOp_psubq_Vx_Wx)
13924{
13925 IEMOP_MNEMONIC2(RM, PSUBQ, psubq, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13926 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_psubq_u128);
13927}
13928
13929
13930/* Opcode 0xf2 0x0f 0xfb - invalid */
13931
13932
13933/** Opcode 0x0f 0xfc - paddb Pq, Qq */
13934FNIEMOP_DEF(iemOp_paddb_Pq_Qq)
13935{
13936 IEMOP_MNEMONIC2(RM, PADDB, paddb, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13937 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddb_u64);
13938}
13939
13940
13941/** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */
13942FNIEMOP_DEF(iemOp_paddb_Vx_Wx)
13943{
13944 IEMOP_MNEMONIC2(RM, PADDB, paddb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13945 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddb_u128);
13946}
13947
13948
13949/* Opcode 0xf2 0x0f 0xfc - invalid */
13950
13951
13952/** Opcode 0x0f 0xfd - paddw Pq, Qq */
13953FNIEMOP_DEF(iemOp_paddw_Pq_Qq)
13954{
13955 IEMOP_MNEMONIC2(RM, PADDW, paddw, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13956 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddw_u64);
13957}
13958
13959
13960/** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */
13961FNIEMOP_DEF(iemOp_paddw_Vx_Wx)
13962{
13963 IEMOP_MNEMONIC2(RM, PADDW, paddw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13964 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddw_u128);
13965}
13966
13967
13968/* Opcode 0xf2 0x0f 0xfd - invalid */
13969
13970
13971/** Opcode 0x0f 0xfe - paddd Pq, Qq */
13972FNIEMOP_DEF(iemOp_paddd_Pq_Qq)
13973{
13974 IEMOP_MNEMONIC2(RM, PADDD, paddd, Pq, Qq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_MMX, IEMOPHINT_IGNORES_OP_SIZES);
13975 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, iemAImpl_paddd_u64);
13976}
13977
13978
13979/** Opcode 0x66 0x0f 0xfe - paddd Vx, Wx */
13980FNIEMOP_DEF(iemOp_paddd_Vx_Wx)
13981{
13982 IEMOP_MNEMONIC2(RM, PADDD, paddd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
13983 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, iemAImpl_paddd_u128);
13984}
13985
13986
13987/* Opcode 0xf2 0x0f 0xfe - invalid */
13988
13989
13990/** Opcode 0x0f 0xff - UD0 (any prefix) */
13991FNIEMOP_DEF(iemOp_ud0)
13992{
13993 IEMOP_MNEMONIC(ud0, "ud0");
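 /* Note: Intel CPUs decode a ModR/M byte (plus any effective address
    bytes) for ud0 before raising \#UD, while other vendors treat it as
    a bare two-byte opcode; this is mirrored below so the reported
    instruction length matches. */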
13994 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
13995 {
13996 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
13997 if (IEM_IS_MODRM_MEM_MODE(bRm))
13998 IEM_OPCODE_SKIP_RM_EFF_ADDR_BYTES(bRm);
13999 }
14000 IEMOP_HLP_DONE_DECODING();
14001 IEMOP_RAISE_INVALID_OPCODE_RET();
14002}
14003
14004
14005
14006/**
14007 * Two byte opcode map, first byte 0x0f.
14008 *
14009 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please
14010 * check if it needs updating as well when making changes.
14011 */
14012const PFNIEMOP g_apfnTwoByteMap[] =
14013{
14014 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
14015 /* 0x00 */ IEMOP_X4(iemOp_Grp6),
14016 /* 0x01 */ IEMOP_X4(iemOp_Grp7),
14017 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew),
14018 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew),
14019 /* 0x04 */ IEMOP_X4(iemOp_Invalid),
14020 /* 0x05 */ IEMOP_X4(iemOp_syscall),
14021 /* 0x06 */ IEMOP_X4(iemOp_clts),
14022 /* 0x07 */ IEMOP_X4(iemOp_sysret),
14023 /* 0x08 */ IEMOP_X4(iemOp_invd),
14024 /* 0x09 */ IEMOP_X4(iemOp_wbinvd),
14025 /* 0x0a */ IEMOP_X4(iemOp_Invalid),
14026 /* 0x0b */ IEMOP_X4(iemOp_ud2),
14027 /* 0x0c */ IEMOP_X4(iemOp_Invalid),
14028 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP),
14029 /* 0x0e */ IEMOP_X4(iemOp_femms),
14030 /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
14031
14032 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vsd_Wsd,
14033 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd,
14034 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq,
14035 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14036 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14037 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14038 /* 0x16 */ iemOp_movhps_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpd_Vdq_Mq, iemOp_movshdup_Vdq_Wdq, iemOp_InvalidNeedRM,
14039 /* 0x17 */ iemOp_movhps_Mq_Vq, iemOp_movhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14040 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16),
14041 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev),
14042 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev),
14043 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev),
14044 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev),
14045 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev),
14046 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev),
14047 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev),
14048
14049 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd,
14050 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd,
14051 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd,
14052 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd,
14053 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td,
14054 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
14055 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd,
14056 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid,
14057 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14058 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14059 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey,
14060 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14061 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd,
14062 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd,
14063 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14064 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14065
14066 /* 0x30 */ IEMOP_X4(iemOp_wrmsr),
14067 /* 0x31 */ IEMOP_X4(iemOp_rdtsc),
14068 /* 0x32 */ IEMOP_X4(iemOp_rdmsr),
14069 /* 0x33 */ IEMOP_X4(iemOp_rdpmc),
14070 /* 0x34 */ IEMOP_X4(iemOp_sysenter),
14071 /* 0x35 */ IEMOP_X4(iemOp_sysexit),
14072 /* 0x36 */ IEMOP_X4(iemOp_Invalid),
14073 /* 0x37 */ IEMOP_X4(iemOp_getsec),
14074 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_0f_38),
14075 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
14076 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_0f_3a),
14077 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
14078 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
14079 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM),
14080 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
14081 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8),
14082
14083 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev),
14084 /* 0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev),
14085 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev),
14086 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev),
14087 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev),
14088 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev),
14089 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev),
14090 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev),
14091 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev),
14092 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev),
14093 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev),
14094 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev),
14095 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev),
14096 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev),
14097 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev),
14098 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev),
14099
14100 /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14101 /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd,
14102 /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM,
14103 /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM,
14104 /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14105 /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14106 /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14107 /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14108 /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd,
14109 /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd,
14110 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd,
14111 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
14112 /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd,
14113 /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd,
14114 /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd,
14115 /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd,
14116
14117 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14118 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14119 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14120 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14121 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14122 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14123 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14124 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14125 /* 0x68 */ iemOp_punpckhbw_Pq_Qq, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14126 /* 0x69 */ iemOp_punpckhwd_Pq_Qq, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14127 /* 0x6a */ iemOp_punpckhdq_Pq_Qq, iemOp_punpckhdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14128 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14129 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14130 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14131 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14132 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vdq_Wdq, iemOp_movdqu_Vdq_Wdq, iemOp_InvalidNeedRM,
14133
14134 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib,
14135 /* 0x71 */ IEMOP_X4(iemOp_Grp12),
14136 /* 0x72 */ IEMOP_X4(iemOp_Grp13),
14137 /* 0x73 */ IEMOP_X4(iemOp_Grp14),
14138 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14139 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14140 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14141 /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14142
14143 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14144 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14145 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14146 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14147 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps,
14148 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps,
14149 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM,
14150 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM,
14151
14152 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv),
14153 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv),
14154 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv),
14155 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv),
14156 /* 0x84 */ IEMOP_X4(iemOp_je_Jv),
14157 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv),
14158 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv),
14159 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv),
14160 /* 0x88 */ IEMOP_X4(iemOp_js_Jv),
14161 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv),
14162 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv),
14163 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv),
14164 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv),
14165 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv),
14166 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv),
14167 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv),
14168
14169 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb),
14170 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb),
14171 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb),
14172 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb),
14173 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb),
14174 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb),
14175 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb),
14176 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb),
14177 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb),
14178 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb),
14179 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb),
14180 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb),
14181 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb),
14182 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb),
14183 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb),
14184 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb),
14185
14186 /* 0xa0 */ IEMOP_X4(iemOp_push_fs),
14187 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs),
14188 /* 0xa2 */ IEMOP_X4(iemOp_cpuid),
14189 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv),
14190 /* 0xa4 */ IEMOP_X4(iemOp_shld_Ev_Gv_Ib),
14191 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL),
14192 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
14193 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
14194 /* 0xa8 */ IEMOP_X4(iemOp_push_gs),
14195 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs),
14196 /* 0xaa */ IEMOP_X4(iemOp_rsm),
14197 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv),
14198 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib),
14199 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL),
14200 /* 0xae */ IEMOP_X4(iemOp_Grp15),
14201 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev),
14202
14203 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb),
14204 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv),
14205 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp),
14206 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv),
14207 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp),
14208 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp),
14209 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb),
14210 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew),
14211 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM,
14212 /* 0xb9 */ IEMOP_X4(iemOp_Grp10),
14213 /* 0xba */ IEMOP_X4(iemOp_Grp8),
14214 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3?
14215 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev,
14216 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev,
14217 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb),
14218 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew),
14219
14220 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb),
14221 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv),
14222 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib,
14223 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14224 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
14225 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
14226 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
14227 /* 0xc7 */ IEMOP_X4(iemOp_Grp9),
14228 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8),
14229 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9),
14230 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10),
14231 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11),
14232 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12),
14233 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13),
14234 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14),
14235 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15),
14236
14237 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps,
14238 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14239 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14240 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14241 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14242 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14243 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,
14244 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14245 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14246 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14247 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14248 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_pand_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14249 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14250 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14251 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14252 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14253
14254 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14255 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14256 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14257 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14258 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14259 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14260 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd,
14261 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mdq_Vdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14262 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14263 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14264 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14265 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14266 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14267 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14268 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14269 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14270
14271 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx,
14272 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14273 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14274 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14275 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14276 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14277 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14278 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14279 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14280 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14281 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14282 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14283 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14284 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14285 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
14286 /* 0xff */ IEMOP_X4(iemOp_ud0),
14287};
14288AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024);
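
/*
 * Illustrative sketch only, excluded from the build with #if 0: one way a
 * dispatcher could select an entry from the four-column table above.  The
 * real decoder derives the SIMD prefix index from its decoding state; the
 * function name and the idxSimdPrefix parameter here are hypothetical.
 */
#if 0
static PFNIEMOP iemDemoLookupTwoByteHandler(uint8_t bOpcode, unsigned idxSimdPrefix)
{
    Assert(idxSimdPrefix < 4); /* 0 = none, 1 = 0x66, 2 = 0xf3, 3 = 0xf2 */
    return g_apfnTwoByteMap[(size_t)bOpcode * 4 + idxSimdPrefix];
}
#endif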
14289
14290/** @} */
14291