VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstVexMap2.cpp.h@105445

Last change on this file since 105445 was 105445, checked in by vboxsync, 4 months ago

VMM/IEM: Fold IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT() into IEM_MC_CALL_SSE_AIMPL_X()/IEM_MC_CALL_AVX_AIMPL_X(), bugref:10652

The current way of raising exceptions doesn't work: the IEM would raise an #XF/#UD if an exception is unmasked and the corresponding
exception status flag is set, even if the current instruction wouldn't generate that exception.
The Intel Architecture manual states that exception flags are sticky and need manual clearing through ldmxcsr/xrstor, but an exception
is only generated from an internal set of flags for the current operation. In order to avoid introducing temporary MXCSR values, which
would increase the overhead for native emitters later on, exception status calculation and raising is now done in the
IEM_MC_CALL_SSE_AIMPL_X() and IEM_MC_CALL_AVX_AIMPL_X() IEM microcode statements.
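
A minimal standalone sketch of that model (the names below are illustrative, not the actual IEM helpers): the arithmetic worker reports only the exceptions signalled by the current operation, and the caller merges them into the sticky MXCSR status bits while deciding on #XF from the per-operation set alone, so stale sticky flags can no longer trigger a spurious exception.

    #include <stdint.h>

    #define MXCSR_XCPT_FLAGS      UINT32_C(0x003f) /* IE..PE status bits (sticky). */
    #define MXCSR_XCPT_MASK_SHIFT 7                /* Mask bits sit 7 bits above the status bits. */

    /* Illustrative stand-in for the logic folded into IEM_MC_CALL_SSE_AIMPL_X():
       fOpXcpts holds only the exception flags raised by this one operation. */
    static int simdFpFinishSketch(uint32_t *puMxcsr, uint32_t fOpXcpts)
    {
        *puMxcsr |= fOpXcpts & MXCSR_XCPT_FLAGS; /* Flags stay sticky until ldmxcsr/xrstor clears them. */
        uint32_t const fUnmasked = fOpXcpts & ~(*puMxcsr >> MXCSR_XCPT_MASK_SHIFT) & MXCSR_XCPT_FLAGS;
        return fUnmasked ? -1 /* raise #XF (or #UD when OSXMMEXCPT is clear) */ : 0;
    }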

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 145.3 KB
/* $Id: IEMAllInstVexMap2.cpp.h 105445 2024-07-23 12:17:44Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstThree0f38.cpp.h is a legacy mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/** @name VEX Opcode Map 2
 * @{
 */

/**
 * Common worker for AESNI/AVX instructions on the forms:
 *     - vaesxxx xmm0, xmm1, xmm2/mem128
 *
 * Exceptions type 4. AVX and AESNI cpuid check for 128-bit operation.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAesNi_Vx_Hx_Wx, PFNIEMAIMPLMEDIAOPTF3U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX_2(fAvx, fAesNi);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_LOCAL(RTUINT128U, uDst);
        IEM_MC_ARG_LOCAL_REF(PRTUINT128U, puDst, uDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
        IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
        IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc1, puSrc2);
        IEM_MC_STORE_XREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX_2(fAvx, fAesNi);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_LOCAL(RTUINT128U, uDst);
        IEM_MC_ARG_LOCAL_REF(PRTUINT128U, puDst, uDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
        IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
        IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));

        IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc1, puSrc2);
        IEM_MC_STORE_XREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/* Opcode VEX.0F38 0x00 - invalid. */


/** Opcode VEX.66.0F38 0x00. */
FNIEMOP_DEF(iemOp_vpshufb_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSHUFB, vpshufb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS( vpshufb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x01 - invalid. */


/** Opcode VEX.66.0F38 0x01. */
FNIEMOP_DEF(iemOp_vphaddw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPHADDW, vphaddw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vphaddw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x02 - invalid. */


/** Opcode VEX.66.0F38 0x02. */
FNIEMOP_DEF(iemOp_vphaddd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPHADDD, vphaddd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vphaddd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x03 - invalid. */


/** Opcode VEX.66.0F38 0x03. */
FNIEMOP_DEF(iemOp_vphaddsw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPHADDSW, vphaddsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vphaddsw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x04 - invalid. */


/** Opcode VEX.66.0F38 0x04. */
FNIEMOP_DEF(iemOp_vpmaddubsw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMADDUBSW, vpmaddubsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpmaddubsw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x05 - invalid. */


/** Opcode VEX.66.0F38 0x05. */
FNIEMOP_DEF(iemOp_vphsubw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPHSUBW, vphsubw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vphsubw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x06 - invalid. */


/** Opcode VEX.66.0F38 0x06. */
FNIEMOP_DEF(iemOp_vphsubd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPHSUBD, vphsubd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vphsubd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x07 - invalid. */


/** Opcode VEX.66.0F38 0x07. */
FNIEMOP_DEF(iemOp_vphsubsw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPHSUBSW, vphsubsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vphsubsw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x08 - invalid. */


/** Opcode VEX.66.0F38 0x08. */
FNIEMOP_DEF(iemOp_vpsignb_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSIGNB, vpsignb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpsignb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x09 - invalid. */


/** Opcode VEX.66.0F38 0x09. */
FNIEMOP_DEF(iemOp_vpsignw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSIGNW, vpsignw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpsignw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x0a - invalid. */


/** Opcode VEX.66.0F38 0x0a. */
FNIEMOP_DEF(iemOp_vpsignd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSIGND, vpsignd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpsignd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x0b - invalid. */


/** Opcode VEX.66.0F38 0x0b. */
FNIEMOP_DEF(iemOp_vpmulhrsw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMULHRSW, vpmulhrsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpmulhrsw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x0c - invalid. */


/** Opcode VEX.66.0F38 0x0c.
 * AVX,AVX */
FNIEMOP_DEF(iemOp_vpermilps_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPERMILPS, vpermilps, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0); /* @todo */
    IEMOPMEDIAOPTF3_INIT_VARS(vpermilps);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x0d - invalid. */


/** Opcode VEX.66.0F38 0x0d.
 * AVX,AVX */
FNIEMOP_DEF(iemOp_vpermilpd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPERMILPD, vpermilpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0); /* @todo */
    IEMOPMEDIAOPTF3_INIT_VARS(vpermilpd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}

/**
 * Common worker for AVX instructions on the forms:
 *     - vtestps/d xmm1, xmm2/mem128
 *     - vtestps/d ymm1, ymm2/mem256
 *
 * Takes function table for function w/o implicit state parameter.
 *
 * Exceptions type 4. AVX cpuid check for both 128-bit and 256-bit operation.
 */
#define IEMOP_BODY_VTESTP_S_D(a_Instr) \
    Assert(pVCpu->iem.s.uVexLength <= 1); \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register, register. \
         */ \
        if (pVCpu->iem.s.uVexLength) \
        { \
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
            IEMOP_HLP_DONE_VEX_DECODING_W0_AND_NO_VVVV_EX(fAvx); \
            IEM_MC_LOCAL(RTUINT256U, uSrc1); \
            IEM_MC_LOCAL(RTUINT256U, uSrc2); \
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 0); \
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 1); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
            IEM_MC_PREPARE_AVX_USAGE(); \
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_ ## a_Instr ## _u256, \
                                                                 iemAImpl_ ## a_Instr ## _u256_fallback), \
                                     puSrc1, puSrc2, pEFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
            IEMOP_HLP_DONE_VEX_DECODING_W0_AND_NO_VVVV_EX(fAvx); \
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 0); \
            IEM_MC_ARG(PCRTUINT128U, puSrc2, 1); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
            IEM_MC_PREPARE_AVX_USAGE(); \
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_ ## a_Instr ## _u128, \
                                                                 iemAImpl_ ## a_Instr ## _u128_fallback), \
                                     puSrc1, puSrc2, pEFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    else \
    { \
        /* \
         * Register, memory. \
         */ \
        if (pVCpu->iem.s.uVexLength) \
        { \
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
            IEM_MC_LOCAL(RTUINT256U, uSrc1); \
            IEM_MC_LOCAL(RTUINT256U, uSrc2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 0); \
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 1); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
            IEMOP_HLP_DONE_VEX_DECODING_W0_AND_NO_VVVV_EX(fAvx); \
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
            IEM_MC_PREPARE_AVX_USAGE(); \
            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_ ## a_Instr ## _u256, \
                                                                 iemAImpl_ ## a_Instr ## _u256_fallback), \
                                     puSrc1, puSrc2, pEFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
            IEM_MC_LOCAL(RTUINT128U, uSrc2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 0); \
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 1); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
            IEMOP_HLP_DONE_VEX_DECODING_W0_AND_NO_VVVV_EX(fAvx); \
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
            IEM_MC_PREPARE_AVX_USAGE(); \
            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_ ## a_Instr ## _u128, \
                                                                 iemAImpl_ ## a_Instr ## _u128_fallback), \
                                     puSrc1, puSrc2, pEFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0


/* Opcode VEX.0F38 0x0e - invalid. */


/**
 * @opcode 0x0e
 * @oppfx 0x66
 * @opflmodify cf,zf,pf,af,sf,of
 * @opflclear pf,af,sf,of
 */
FNIEMOP_DEF(iemOp_vtestps_Vx_Wx)
{
    /** @todo We need to check VEX.W somewhere... it is documented to \#UD on all
     * CPU modes. */
    IEMOP_MNEMONIC2(VEX_RM, VTESTPS, vtestps, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_W_ZERO);
    IEMOP_BODY_VTESTP_S_D(vtestps);
}


/* Opcode VEX.0F38 0x0f - invalid. */


/**
 * @opcode 0x0f
 * @oppfx 0x66
 * @opflmodify cf,zf,pf,af,sf,of
 * @opflclear pf,af,sf,of
 */
FNIEMOP_DEF(iemOp_vtestpd_Vx_Wx)
{
    /** @todo We need to check VEX.W somewhere... it is documented to \#UD on all
     * CPU modes. */
    IEMOP_MNEMONIC2(VEX_RM, VTESTPD, vtestpd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_W_ZERO);
    IEMOP_BODY_VTESTP_S_D(vtestpd);
}

/* Opcode VEX.0F38 0x10 - invalid */
/* Opcode VEX.66.0F38 0x10 - invalid (legacy only). */
/* Opcode VEX.0F38 0x11 - invalid */
/* Opcode VEX.66.0F38 0x11 - invalid */
/* Opcode VEX.0F38 0x12 - invalid */
/* Opcode VEX.66.0F38 0x12 - invalid */
/* Opcode VEX.0F38 0x13 - invalid */
/* Opcode VEX.66.0F38 0x13 (vex only). */
FNIEMOP_STUB(iemOp_vcvtph2ps_Vx_Wx);
/* Opcode VEX.0F38 0x14 - invalid */
/* Opcode VEX.66.0F38 0x14 - invalid (legacy only). */
/* Opcode VEX.0F38 0x15 - invalid */
/* Opcode VEX.66.0F38 0x15 - invalid (legacy only). */
/* Opcode VEX.0F38 0x16 - invalid */


/** Opcode VEX.66.0F38 0x16. */
FNIEMOP_DEF(iemOp_vpermps_Vqq_Hqq_Wqq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();
        IEM_MC_LOCAL(RTUINT256U, uSrc1);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
        IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_LOCAL(RTUINT256U, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
        IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_LOCAL(RTUINT256U, uDst);
        IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
        IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpermps_u256, iemAImpl_vpermps_u256_fallback),
                                 puDst, puSrc1, puSrc2);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();
        IEM_MC_LOCAL(RTUINT256U, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
        IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_LOCAL(RTUINT256U, uSrc1);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
        IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_LOCAL(RTUINT256U, uDst);
        IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
        IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpermps_u256, iemAImpl_vpermps_u256_fallback),
                                 puDst, puSrc1, puSrc2);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/* Opcode VEX.0F38 0x17 - invalid */


/**
 * @opcode 0x17
 * @oppfx 0x66
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflclear pf,af,sf,of
 */
FNIEMOP_DEF(iemOp_vptest_Vx_Wx)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 1);
            IEM_MC_ARG(uint32_t *, pEFlags, 2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vptest_u256, iemAImpl_vptest_u256_fallback),
                                     puSrc1, puSrc2, pEFlags);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc2, 1);
            IEM_MC_ARG(uint32_t *, pEFlags, 2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_ptest_u128, puSrc1, puSrc2, pEFlags);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 1);
            IEM_MC_ARG(uint32_t *, pEFlags, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vptest_u256, iemAImpl_vptest_u256_fallback),
                                     puSrc1, puSrc2, pEFlags);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 1);
            IEM_MC_ARG(uint32_t *, pEFlags, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_ptest_u128, puSrc1, puSrc2, pEFlags);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}

/* Opcode VEX.0F38 0x18 - invalid */


/** Opcode VEX.66.0F38 0x18. */
FNIEMOP_DEF(iemOp_vbroadcastss_Vx_Wd)
{
    IEMOP_MNEMONIC2(VEX_RM, VBROADCASTSS, vbroadcastss, Vx, Wx, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
            IEM_MC_LOCAL(uint32_t, uSrc);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
            IEM_MC_BROADCAST_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
            IEM_MC_LOCAL(uint32_t, uSrc);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
            IEM_MC_BROADCAST_XREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(uint32_t, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_BROADCAST_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(uint32_t, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_BROADCAST_XREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}


/* Opcode VEX.0F38 0x19 - invalid */


/** Opcode VEX.66.0F38 0x19. */
FNIEMOP_DEF(iemOp_vbroadcastsd_Vqq_Wq)
{
    IEMOP_MNEMONIC2(VEX_RM, VBROADCASTSD, vbroadcastsd, Vx, Wx, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
            IEM_MC_LOCAL(uint64_t, uSrc);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
            IEM_MC_BROADCAST_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
            IEM_MC_LOCAL(uint64_t, uSrc);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
            IEM_MC_BROADCAST_XREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_BROADCAST_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/* Opcode VEX.0F38 0x1a - invalid */


/** Opcode VEX.66.0F38 0x1a. */
FNIEMOP_DEF(iemOp_vbroadcastf128_Vqq_Mdq)
{
    IEMOP_MNEMONIC2(VEX_RM, VBROADCASTF128, vbroadcastf128, Vx, Wx, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * No register, register.
         */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_BROADCAST_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/* Opcode VEX.0F38 0x1b - invalid */
/* Opcode VEX.66.0F38 0x1b - invalid */
/* Opcode VEX.0F38 0x1c - invalid. */


/** Opcode VEX.66.0F38 0x1c. */
FNIEMOP_DEF(iemOp_vpabsb_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VPABSB, vpabsb, Vx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF2_INIT_VARS(vpabsb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x1d - invalid. */


/** Opcode VEX.66.0F38 0x1d. */
FNIEMOP_DEF(iemOp_vpabsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VPABSW, vpabsw, Vx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF2_INIT_VARS(vpabsw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}

/* Opcode VEX.0F38 0x1e - invalid. */


/** Opcode VEX.66.0F38 0x1e. */
FNIEMOP_DEF(iemOp_vpabsd_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VPABSD, vpabsd, Vx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF2_INIT_VARS(vpabsd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F38 0x1f - invalid */
/* Opcode VEX.66.0F38 0x1f - invalid */

/** Body for the vpmov{s,z}x* instructions. */
#define IEMOP_BODY_VPMOV_S_Z(a_Instr, a_SrcWidth, a_VexLengthMemFetch) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register, register. \
         */ \
        if (pVCpu->iem.s.uVexLength) \
        { \
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2); \
            IEM_MC_LOCAL(RTUINT256U, uDst); \
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
            IEM_MC_ARG(PCRTUINT128U, puSrc, 1); \
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
            IEM_MC_PREPARE_AVX_USAGE(); \
            IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
                                                                 iemAImpl_ ## a_Instr ## _u256_fallback), \
                                     puDst, puSrc); \
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx); \
            IEM_MC_ARG(PRTUINT128U, puDst, 0); \
            IEM_MC_ARG(uint ## a_SrcWidth ##_t, uSrc, 1); \
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
            IEM_MC_PREPARE_AVX_USAGE(); \
            IEM_MC_FETCH_XREG_U ## a_SrcWidth (uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0); \
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
                                                                 iemAImpl_## a_Instr ## _u128_fallback), \
                                     puDst, uSrc); \
            IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    else \
    { \
        /* \
         * Register, memory. \
         */ \
        if (pVCpu->iem.s.uVexLength) \
        { \
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
            IEM_MC_LOCAL(RTUINT256U, uDst); \
            IEM_MC_LOCAL(RTUINT128U, uSrc); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2); \
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
            IEM_MC_PREPARE_AVX_USAGE(); \
            a_VexLengthMemFetch(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
                                                                 iemAImpl_ ## a_Instr ## _u256_fallback), \
                                     puDst, puSrc); \
            IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_ARG(PRTUINT128U, puDst, 0); \
            IEM_MC_ARG(uint ## a_SrcWidth ##_t, uSrc, 1); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx); \
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
            IEM_MC_PREPARE_AVX_USAGE(); \
            IEM_MC_FETCH_MEM_U ## a_SrcWidth (uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
                                                                 iemAImpl_ ## a_Instr ## _u128_fallback), \
                                     puDst, uSrc); \
            IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0

/** Opcode VEX.66.0F38 0x20. */
FNIEMOP_DEF(iemOp_vpmovsxbw_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVSXBW, vpmovsxbw, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovsxbw, 64, IEM_MC_FETCH_MEM_U128_NO_AC);
}


/** Opcode VEX.66.0F38 0x21. */
FNIEMOP_DEF(iemOp_vpmovsxbd_Vx_UxMd)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVSXBD, vpmovsxbd, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovsxbd, 32, IEM_MC_FETCH_MEM_U128);
}


/** Opcode VEX.66.0F38 0x22. */
FNIEMOP_DEF(iemOp_vpmovsxbq_Vx_UxMw)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVSXBQ, vpmovsxbq, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovsxbq, 16, IEM_MC_FETCH_MEM_U128);
}


/** Opcode VEX.66.0F38 0x23. */
FNIEMOP_DEF(iemOp_vpmovsxwd_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVSXWD, vpmovsxwd, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovsxwd, 64, IEM_MC_FETCH_MEM_U128_NO_AC);
}


/** Opcode VEX.66.0F38 0x24. */
FNIEMOP_DEF(iemOp_vpmovsxwq_Vx_UxMd)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVSXWQ, vpmovsxwq, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovsxwq, 32, IEM_MC_FETCH_MEM_U128);
}


/** Opcode VEX.66.0F38 0x25. */
FNIEMOP_DEF(iemOp_vpmovsxdq_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVSXDQ, vpmovsxdq, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovsxdq, 64, IEM_MC_FETCH_MEM_U128_NO_AC);
}


/* Opcode VEX.66.0F38 0x26 - invalid */
/* Opcode VEX.66.0F38 0x27 - invalid */


/** Opcode VEX.66.0F38 0x28. */
FNIEMOP_DEF(iemOp_vpmuldq_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMULDQ, vpmuldq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpmuldq);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x29. */
FNIEMOP_DEF(iemOp_vpcmpeqq_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQQ, vpcmpeqq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpcmpeqq);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}

FNIEMOP_DEF(iemOp_vmovntdqa_Vx_Mx)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        if (pVCpu->iem.s.uVexLength == 0)
        {
            /**
             * @opcode 0x2a
             * @opcodesub !11 mr/reg vex.l=0
             * @oppfx 0x66
             * @opcpuid avx
             * @opgroup og_avx_cachect
             * @opxcpttype 1
             * @optest op1=-1 op2=2 -> op1=2
             * @optest op1=0 op2=-42 -> op1=-42
             */
            /* 128-bit: Memory, register. */
            IEMOP_MNEMONIC2EX(vmovntdqa_Vdq_WO_Mdq_L0, "vmovntdqa, Vdq_WO, Mdq", VEX_RM_MEM, VMOVNTDQA, vmovntdqa, Vx_WO, Mx,
                              DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /**
             * @opdone
             * @opcode 0x2a
             * @opcodesub !11 mr/reg vex.l=1
             * @oppfx 0x66
             * @opcpuid avx2
             * @opgroup og_avx2_cachect
             * @opxcpttype 1
             * @optest op1=-1 op2=2 -> op1=2
             * @optest op1=0 op2=-42 -> op1=-42
             */
            /* 256-bit: Memory, register. */
            IEMOP_MNEMONIC2EX(vmovntdqa_Vqq_WO_Mqq_L1, "vmovntdqa, Vqq_WO,Mqq", VEX_RM_MEM, VMOVNTDQA, vmovntdqa, Vx_WO, Mx,
                              DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }

    /**
     * @opdone
     * @opmnemonic udvex660f382arg
     * @opcode 0x2a
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid avx
     * @optest ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}


/** Opcode VEX.66.0F38 0x2b. */
FNIEMOP_DEF(iemOp_vpackusdw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPACKUSDW, vpackusdw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS( vpackusdw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x2c. */
FNIEMOP_DEF(iemOp_vmaskmovps_Vx_Hx_Mx)
{
    // IEMOP_MNEMONIC3(RM, VMASKMOVPS, vmaskmovps, Vx, Hx, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (!IEM_IS_MODRM_REG_MODE(bRm))
    {
        if (pVCpu->iem.s.uVexLength)
        {
            /*
             * YMM [ModRM:reg], YMM [vvvv], memory [ModRM:r/m]
             */
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_ARG_CONST(uint8_t, iYRegDst, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 0);
            IEM_MC_ARG_CONST(uint8_t, iYRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 1);
            IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 3);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 2);
            IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vmaskmovps_load_u256, iYRegDst, iYRegMsk, iEffSeg, GCPtrEffSrc);

            IEM_MC_END();
        }
        else
        {
            /*
             * XMM [ModRM:reg], XMM [vvvv], memory [ModRM:r/m]
             */
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_ARG_CONST(uint8_t, iXRegDst, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 0);
            IEM_MC_ARG_CONST(uint8_t, iXRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 1);
            IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 3);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 2);
            IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vmaskmovps_load_u128, iXRegDst, iXRegMsk, iEffSeg, GCPtrEffSrc);

            IEM_MC_END();
        }
    }
    else
    {
        /* The register, register encoding is invalid. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }
}

/** Opcode VEX.66.0F38 0x2d. */
FNIEMOP_DEF(iemOp_vmaskmovpd_Vx_Hx_Mx)
{
    // IEMOP_MNEMONIC3(RM, VMASKMOVPD, vmaskmovpd, Vx, Hx, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (!IEM_IS_MODRM_REG_MODE(bRm))
    {
        if (pVCpu->iem.s.uVexLength)
        {
            /*
             * YMM [ModRM:reg], YMM [vvvv], memory [ModRM:r/m]
             */
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_ARG_CONST(uint8_t, iYRegDst, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 0);
            IEM_MC_ARG_CONST(uint8_t, iYRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 1);
            IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 3);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 2);
            IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vmaskmovpd_load_u256, iYRegDst, iYRegMsk, iEffSeg, GCPtrEffSrc);

            IEM_MC_END();
        }
        else
        {
            /*
             * XMM [ModRM:reg], XMM [vvvv], memory [ModRM:r/m]
             */
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_ARG_CONST(uint8_t, iXRegDst, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 0);
            IEM_MC_ARG_CONST(uint8_t, iXRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 1);
            IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 3);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 2);
            IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vmaskmovpd_load_u128, iXRegDst, iXRegMsk, iEffSeg, GCPtrEffSrc);

            IEM_MC_END();
        }
    }
    else
    {
        /* The register, register encoding is invalid. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }
}


/** Opcode VEX.66.0F38 0x2e. */
FNIEMOP_DEF(iemOp_vmaskmovps_Mx_Hx_Vx)
{
    // IEMOP_MNEMONIC3(RM, VMASKMOVPS, vmaskmovps, Mx, Hx, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (!IEM_IS_MODRM_REG_MODE(bRm))
    {
        if (pVCpu->iem.s.uVexLength)
        {
            /*
             * memory [ModRM:r/m], YMM [vvvv], YMM [ModRM:reg]
             */
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);

            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
            IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);
            IEM_MC_ARG_CONST(uint8_t, iYRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 2);
            IEM_MC_ARG_CONST(uint8_t, iYRegSrc, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 3);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vmaskmovps_store_u256, iEffSeg, GCPtrEffDst, iYRegMsk, iYRegSrc);

            IEM_MC_END();
        }
        else
        {
            /*
             * memory [ModRM:r/m], XMM [vvvv], XMM [ModRM:reg]
             */
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);

            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
            IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);
            IEM_MC_ARG_CONST(uint8_t, iXRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 2);
            IEM_MC_ARG_CONST(uint8_t, iXRegSrc, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 3);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vmaskmovps_store_u128, iEffSeg, GCPtrEffDst, iXRegMsk, iXRegSrc);

            IEM_MC_END();
        }
    }
    else
    {
        /* The register, register encoding is invalid. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }
}


/** Opcode VEX.66.0F38 0x2f. */
FNIEMOP_DEF(iemOp_vmaskmovpd_Mx_Hx_Vx)
{
    // IEMOP_MNEMONIC3(RM, VMASKMOVPD, vmaskmovpd, Mx, Hx, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (!IEM_IS_MODRM_REG_MODE(bRm))
    {
        if (pVCpu->iem.s.uVexLength)
        {
            /*
             * memory [ModRM:r/m], YMM [vvvv], YMM [ModRM:reg]
             */
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);

            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
            IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);
            IEM_MC_ARG_CONST(uint8_t, iYRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 2);
            IEM_MC_ARG_CONST(uint8_t, iYRegSrc, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 3);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vmaskmovpd_store_u256, iEffSeg, GCPtrEffDst, iYRegMsk, iYRegSrc);

            IEM_MC_END();
        }
        else
        {
            /*
             * memory [ModRM:r/m], XMM [vvvv], XMM [ModRM:reg]
             */
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);

            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
            IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);
            IEM_MC_ARG_CONST(uint8_t, iXRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 2);
            IEM_MC_ARG_CONST(uint8_t, iXRegSrc, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 3);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vmaskmovpd_store_u128, iEffSeg, GCPtrEffDst, iXRegMsk, iXRegSrc);

            IEM_MC_END();
        }
    }
    else
    {
        /* The register, register encoding is invalid. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }
}

/** Opcode VEX.66.0F38 0x30. */
FNIEMOP_DEF(iemOp_vpmovzxbw_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVZXBW, vpmovzxbw, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovzxbw, 64, IEM_MC_FETCH_MEM_U128_NO_AC);
}


/** Opcode VEX.66.0F38 0x31. */
FNIEMOP_DEF(iemOp_vpmovzxbd_Vx_UxMd)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVZXBD, vpmovzxbd, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovzxbd, 32, IEM_MC_FETCH_MEM_U128);
}


/** Opcode VEX.66.0F38 0x32. */
FNIEMOP_DEF(iemOp_vpmovzxbq_Vx_UxMw)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVZXBQ, vpmovzxbq, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovzxbq, 16, IEM_MC_FETCH_MEM_U128);
}


/** Opcode VEX.66.0F38 0x33. */
FNIEMOP_DEF(iemOp_vpmovzxwd_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVZXWD, vpmovzxwd, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovzxwd, 64, IEM_MC_FETCH_MEM_U128_NO_AC);
}


/** Opcode VEX.66.0F38 0x34. */
FNIEMOP_DEF(iemOp_vpmovzxwq_Vx_UxMd)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVZXWQ, vpmovzxwq, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovzxwq, 32, IEM_MC_FETCH_MEM_U128);
}


/** Opcode VEX.66.0F38 0x35. */
FNIEMOP_DEF(iemOp_vpmovzxdq_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(VEX_RM, VPMOVZXDQ, vpmovzxdq, Vx, Wq, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_VPMOV_S_Z(vpmovzxdq, 64, IEM_MC_FETCH_MEM_U128_NO_AC);
}


/* Opcode VEX.66.0F38 0x36. */
FNIEMOP_DEF(iemOp_vpermd_Vqq_Hqq_Wqq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();
        IEM_MC_LOCAL(RTUINT256U, uSrc1);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
        IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_LOCAL(RTUINT256U, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
        IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_LOCAL(RTUINT256U, uDst);
        IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
        IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpermd_u256, iemAImpl_vpermd_u256_fallback),
                                 puDst, puSrc1, puSrc2);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();
        IEM_MC_LOCAL(RTUINT256U, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
        IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_LOCAL(RTUINT256U, uSrc1);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
        IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_LOCAL(RTUINT256U, uDst);
        IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
        IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpermd_u256, iemAImpl_vpermd_u256_fallback),
                                 puDst, puSrc1, puSrc2);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode VEX.66.0F38 0x37. */
FNIEMOP_DEF(iemOp_vpcmpgtq_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTQ, vpcmpgtq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS( vpcmpgtq);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x38. */
FNIEMOP_DEF(iemOp_vpminsb_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMINSB, vpminsb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS( vpminsb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x39. */
FNIEMOP_DEF(iemOp_vpminsd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMINSD, vpminsd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS( vpminsd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x3a. */
FNIEMOP_DEF(iemOp_vpminuw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMINUW, vpminuw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS( vpminuw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x3b. */
FNIEMOP_DEF(iemOp_vpminud_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMINUD, vpminud, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS( vpminud);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x3c. */
FNIEMOP_DEF(iemOp_vpmaxsb_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMAXSB, vpmaxsb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS( vpmaxsb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x3d. */
FNIEMOP_DEF(iemOp_vpmaxsd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMAXSD, vpmaxsd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS( vpmaxsd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x3e. */
FNIEMOP_DEF(iemOp_vpmaxuw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMAXUW, vpmaxuw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS( vpmaxuw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F38 0x3f. */
FNIEMOP_DEF(iemOp_vpmaxud_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMAXUD, vpmaxud, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS( vpmaxud);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}

1497/** Opcode VEX.66.0F38 0x40. */
1498FNIEMOP_DEF(iemOp_vpmulld_Vx_Hx_Wx)
1499{
1500 IEMOP_MNEMONIC3(VEX_RVM, VPMULLD, vpmulld, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
1501 IEMOPMEDIAOPTF3_INIT_VARS(vpmulld);
1502 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1503}
1504
1505
1506/** Opcode VEX.66.0F38 0x41. */
1507FNIEMOP_DEF(iemOp_vphminposuw_Vdq_Wdq)
1508{
1509 IEMOP_MNEMONIC2(VEX_RM, VPHMINPOSUW, vphminposuw, Vdq, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
1510 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1511 if (IEM_IS_MODRM_REG_MODE(bRm))
1512 {
1513 /*
1514 * Register, register.
1515 */
1516 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1517 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1518 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1519 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1520 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1521 IEM_MC_PREPARE_AVX_USAGE();
1522 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1523 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1524 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vphminposuw_u128, iemAImpl_vphminposuw_u128_fallback),
1525 puDst, puSrc);
1526 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
1527 IEM_MC_ADVANCE_RIP_AND_FINISH();
1528 IEM_MC_END();
1529 }
1530 else
1531 {
1532 /*
1533 * Register, memory.
1534 */
1535 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1536 IEM_MC_LOCAL(RTUINT128U, uSrc);
1537 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1538 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1539 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1540
1541 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1542 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1543 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1544 IEM_MC_PREPARE_AVX_USAGE();
1545
1546 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1547 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1548 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vphminposuw_u128, iemAImpl_vphminposuw_u128_fallback),
1549 puDst, puSrc);
1550 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
1551
1552 IEM_MC_ADVANCE_RIP_AND_FINISH();
1553 IEM_MC_END();
1554 }
1555}
1556
1557
1558/* Opcode VEX.66.0F38 0x42 - invalid. */
1559/* Opcode VEX.66.0F38 0x43 - invalid. */
1560/* Opcode VEX.66.0F38 0x44 - invalid. */
1561
1562
1563/** Opcode VEX.66.0F38 0x45. */
1564FNIEMOP_DEF(iemOp_vpsrlvd_q_Vx_Hx_Wx)
1565{
1566 IEMOP_MNEMONIC3(VEX_RVM, VPSRLVD, vpsrlvd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
1567
1568 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
1569 {
1570 IEMOPMEDIAOPTF3_INIT_VARS(vpsrlvq);
1571 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1572 }
1573 else
1574 {
1575 IEMOPMEDIAOPTF3_INIT_VARS(vpsrlvd);
1576 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1577 }
1578}
1579
1580
1581/** Opcode VEX.66.0F38 0x46. */
1582FNIEMOP_DEF(iemOp_vpsravd_Vx_Hx_Wx)
1583{
1584 IEMOP_MNEMONIC3(VEX_RVM, VPSRAVD, vpsravd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
1585 IEMOPMEDIAOPTF3_INIT_VARS(vpsravd);
1586 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1587}
1588
1589
1590/** Opcode VEX.66.0F38 0x47. */
1591FNIEMOP_DEF(iemOp_vpsllvd_q_Vx_Hx_Wx)
1592{
1593 IEMOP_MNEMONIC3(VEX_RVM, VPSLLVD, vpsllvd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
1594
1595 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
1596 {
1597 IEMOPMEDIAOPTF3_INIT_VARS(vpsllvq);
1598 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1599 }
1600 else
1601 {
1602 IEMOPMEDIAOPTF3_INIT_VARS(vpsllvd);
1603 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1604 }
1605}
1606
1607
1608/* Opcode VEX.66.0F38 0x48 - invalid. */
1609/* Opcode VEX.66.0F38 0x49 - invalid. */
1610/* Opcode VEX.66.0F38 0x4a - invalid. */
1611/* Opcode VEX.66.0F38 0x4b - invalid. */
1612/* Opcode VEX.66.0F38 0x4c - invalid. */
1613/* Opcode VEX.66.0F38 0x4d - invalid. */
1614/* Opcode VEX.66.0F38 0x4e - invalid. */
1615/* Opcode VEX.66.0F38 0x4f - invalid. */
1616
1617/* Opcode VEX.66.0F38 0x50 - invalid. */
1618/* Opcode VEX.66.0F38 0x51 - invalid. */
1619/* Opcode VEX.66.0F38 0x52 - invalid. */
1620/* Opcode VEX.66.0F38 0x53 - invalid. */
1621/* Opcode VEX.66.0F38 0x54 - invalid. */
1622/* Opcode VEX.66.0F38 0x55 - invalid. */
1623/* Opcode VEX.66.0F38 0x56 - invalid. */
1624/* Opcode VEX.66.0F38 0x57 - invalid. */
1625
1626
1627/** Opcode VEX.66.0F38 0x58. */
1628FNIEMOP_DEF(iemOp_vpbroadcastd_Vx_Wx)
1629{
1630 IEMOP_MNEMONIC2(VEX_RM, VPBROADCASTD, vpbroadcastd, Vx, Wx, DISOPTYPE_HARMLESS, 0);
1631 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1632 if (IEM_IS_MODRM_REG_MODE(bRm))
1633 {
1634 /*
1635 * Register, register.
1636 */
1637 if (pVCpu->iem.s.uVexLength)
1638 {
1639 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1640 IEM_MC_LOCAL(uint32_t, uSrc);
1641
1642 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1643 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1644 IEM_MC_PREPARE_AVX_USAGE();
1645
1646 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
1647 IEM_MC_BROADCAST_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1648
1649 IEM_MC_ADVANCE_RIP_AND_FINISH();
1650 IEM_MC_END();
1651 }
1652 else
1653 {
1654 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1655 IEM_MC_LOCAL(uint32_t, uSrc);
1656
1657 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1658 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1659 IEM_MC_PREPARE_AVX_USAGE();
1660 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
1661 IEM_MC_BROADCAST_XREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1662
1663 IEM_MC_ADVANCE_RIP_AND_FINISH();
1664 IEM_MC_END();
1665 }
1666 }
1667 else
1668 {
1669 /*
1670 * Register, memory.
1671 */
1672 if (pVCpu->iem.s.uVexLength)
1673 {
1674 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1675 IEM_MC_LOCAL(uint32_t, uSrc);
1676 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1677
1678 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1679 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1680 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1681 IEM_MC_PREPARE_AVX_USAGE();
1682
1683 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1684 IEM_MC_BROADCAST_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1685
1686 IEM_MC_ADVANCE_RIP_AND_FINISH();
1687 IEM_MC_END();
1688 }
1689 else
1690 {
1691 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1692 IEM_MC_LOCAL(uint32_t, uSrc);
1693 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1694
1695 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1696 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1697 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1698 IEM_MC_PREPARE_AVX_USAGE();
1699
1700 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1701 IEM_MC_BROADCAST_XREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1702
1703 IEM_MC_ADVANCE_RIP_AND_FINISH();
1704 IEM_MC_END();
1705 }
1706 }
1707}
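
/*
 * Illustrative reference for VPBROADCASTD (a sketch only, not IEM's
 * implementation; RefBroadcastU32 is a hypothetical helper). Element 0 of the
 * source is replicated into every destination lane; the microcode above adds
 * the VLMAX zero-extension of the untouched upper YMM half. The b/w/q element
 * sizes handled elsewhere in this map follow the same pattern:
 *
 *     static inline void RefBroadcastU32(uint32_t *pauDst, unsigned cLanes, uint32_t uSrc)
 *     {
 *         for (unsigned i = 0; i < cLanes; i++)   // cLanes = 4 for XMM, 8 for YMM
 *             pauDst[i] = uSrc;
 *     }
 */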
1708
1709
1710/** Opcode VEX.66.0F38 0x59. */
1711FNIEMOP_DEF(iemOp_vpbroadcastq_Vx_Wx)
1712{
1713 IEMOP_MNEMONIC2(VEX_RM, VPBROADCASTQ, vpbroadcastq, Vx, Wx, DISOPTYPE_HARMLESS, 0);
1714 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1715 if (IEM_IS_MODRM_REG_MODE(bRm))
1716 {
1717 /*
1718 * Register, register.
1719 */
1720 if (pVCpu->iem.s.uVexLength)
1721 {
1722 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1723 IEM_MC_LOCAL(uint64_t, uSrc);
1724
1725 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1726 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1727 IEM_MC_PREPARE_AVX_USAGE();
1728
1729 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
1730 IEM_MC_BROADCAST_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1731
1732 IEM_MC_ADVANCE_RIP_AND_FINISH();
1733 IEM_MC_END();
1734 }
1735 else
1736 {
1737 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1738 IEM_MC_LOCAL(uint64_t, uSrc);
1739
1740 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1741 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1742 IEM_MC_PREPARE_AVX_USAGE();
1743 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
1744 IEM_MC_BROADCAST_XREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1745
1746 IEM_MC_ADVANCE_RIP_AND_FINISH();
1747 IEM_MC_END();
1748 }
1749 }
1750 else
1751 {
1752 /*
1753 * Register, memory.
1754 */
1755 if (pVCpu->iem.s.uVexLength)
1756 {
1757 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1758 IEM_MC_LOCAL(uint64_t, uSrc);
1759 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1760
1761 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1762 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1763 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1764 IEM_MC_PREPARE_AVX_USAGE();
1765
1766 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1767 IEM_MC_BROADCAST_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1768
1769 IEM_MC_ADVANCE_RIP_AND_FINISH();
1770 IEM_MC_END();
1771 }
1772 else
1773 {
1774 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1775 IEM_MC_LOCAL(uint64_t, uSrc);
1776 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1777
1778 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1779 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1780 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1781 IEM_MC_PREPARE_AVX_USAGE();
1782
1783 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1784 IEM_MC_BROADCAST_XREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1785
1786 IEM_MC_ADVANCE_RIP_AND_FINISH();
1787 IEM_MC_END();
1788 }
1789 }
1790}
1791
1792
1793/** Opcode VEX.66.0F38 0x5a. */
1794FNIEMOP_DEF(iemOp_vbroadcasti128_Vqq_Mdq)
1795{
1796 IEMOP_MNEMONIC2(VEX_RM, VBROADCASTI128, vbroadcasti128, Vx, Wx, DISOPTYPE_HARMLESS, 0);
1797 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1798 if (IEM_IS_MODRM_REG_MODE(bRm))
1799 {
1800 /*
1801 * No register, register.
1802 */
1803 IEMOP_RAISE_INVALID_OPCODE_RET();
1804 }
1805 else
1806 {
1807 /*
1808 * Register, memory.
1809 */
1810 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1811 IEM_MC_LOCAL(RTUINT128U, uSrc);
1812 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1813
1814 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1815 IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(fAvx);
1816 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1817 IEM_MC_PREPARE_AVX_USAGE();
1818
1819 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1820 IEM_MC_BROADCAST_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1821
1822 IEM_MC_ADVANCE_RIP_AND_FINISH();
1823 IEM_MC_END();
1824 }
1825}
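
/*
 * Illustrative reference for VBROADCASTI128 (a sketch only; RefBroadcastU128
 * is a hypothetical helper). The 128-bit memory operand is copied into both
 * halves of the destination YMM register; a register source is invalid, as
 * handled above:
 *
 *     static inline void RefBroadcastU128(RTUINT256U *puDst, RTUINT128U const *puSrc)
 *     {
 *         puDst->au64[0] = puDst->au64[2] = puSrc->au64[0];
 *         puDst->au64[1] = puDst->au64[3] = puSrc->au64[1];
 *     }
 */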
1826
1827
1828/* Opcode VEX.66.0F38 0x5b - invalid. */
1829/* Opcode VEX.66.0F38 0x5c - invalid. */
1830/* Opcode VEX.66.0F38 0x5d - invalid. */
1831/* Opcode VEX.66.0F38 0x5e - invalid. */
1832/* Opcode VEX.66.0F38 0x5f - invalid. */
1833
1834/* Opcode VEX.66.0F38 0x60 - invalid. */
1835/* Opcode VEX.66.0F38 0x61 - invalid. */
1836/* Opcode VEX.66.0F38 0x62 - invalid. */
1837/* Opcode VEX.66.0F38 0x63 - invalid. */
1838/* Opcode VEX.66.0F38 0x64 - invalid. */
1839/* Opcode VEX.66.0F38 0x65 - invalid. */
1840/* Opcode VEX.66.0F38 0x66 - invalid. */
1841/* Opcode VEX.66.0F38 0x67 - invalid. */
1842/* Opcode VEX.66.0F38 0x68 - invalid. */
1843/* Opcode VEX.66.0F38 0x69 - invalid. */
1844/* Opcode VEX.66.0F38 0x6a - invalid. */
1845/* Opcode VEX.66.0F38 0x6b - invalid. */
1846/* Opcode VEX.66.0F38 0x6c - invalid. */
1847/* Opcode VEX.66.0F38 0x6d - invalid. */
1848/* Opcode VEX.66.0F38 0x6e - invalid. */
1849/* Opcode VEX.66.0F38 0x6f - invalid. */
1850
1851/* Opcode VEX.66.0F38 0x70 - invalid. */
1852/* Opcode VEX.66.0F38 0x71 - invalid. */
1853/* Opcode VEX.66.0F38 0x72 - invalid. */
1854/* Opcode VEX.66.0F38 0x73 - invalid. */
1855/* Opcode VEX.66.0F38 0x74 - invalid. */
1856/* Opcode VEX.66.0F38 0x75 - invalid. */
1857/* Opcode VEX.66.0F38 0x76 - invalid. */
1858/* Opcode VEX.66.0F38 0x77 - invalid. */
1859
1860
1861/** Opcode VEX.66.0F38 0x78. */
1862FNIEMOP_DEF(iemOp_vpbroadcastb_Vx_Wx)
1863{
1864 IEMOP_MNEMONIC2(VEX_RM, VPBROADCASTB, vpbroadcastb, Vx, Wx, DISOPTYPE_HARMLESS, 0);
1865 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1866 if (IEM_IS_MODRM_REG_MODE(bRm))
1867 {
1868 /*
1869 * Register, register.
1870 */
1871 if (pVCpu->iem.s.uVexLength)
1872 {
1873 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1874 IEM_MC_LOCAL(uint8_t, uSrc);
1875
1876 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1877 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1878 IEM_MC_PREPARE_AVX_USAGE();
1879
1880 IEM_MC_FETCH_XREG_U8(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
1881 IEM_MC_BROADCAST_YREG_U8_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1882
1883 IEM_MC_ADVANCE_RIP_AND_FINISH();
1884 IEM_MC_END();
1885 }
1886 else
1887 {
1888 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1889 IEM_MC_LOCAL(uint8_t, uSrc);
1890
1891 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1892 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1893 IEM_MC_PREPARE_AVX_USAGE();
1894 IEM_MC_FETCH_XREG_U8(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
1895 IEM_MC_BROADCAST_XREG_U8_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1896
1897 IEM_MC_ADVANCE_RIP_AND_FINISH();
1898 IEM_MC_END();
1899 }
1900 }
1901 else
1902 {
1903 /*
1904 * Register, memory.
1905 */
1906 if (pVCpu->iem.s.uVexLength)
1907 {
1908 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1909 IEM_MC_LOCAL(uint8_t, uSrc);
1910 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1911
1912 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1913 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1914 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1915 IEM_MC_PREPARE_AVX_USAGE();
1916
1917 IEM_MC_FETCH_MEM_U8(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1918 IEM_MC_BROADCAST_YREG_U8_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1919
1920 IEM_MC_ADVANCE_RIP_AND_FINISH();
1921 IEM_MC_END();
1922 }
1923 else
1924 {
1925 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1926 IEM_MC_LOCAL(uint8_t, uSrc);
1927 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1928
1929 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1930 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1931 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1932 IEM_MC_PREPARE_AVX_USAGE();
1933
1934 IEM_MC_FETCH_MEM_U8(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1935 IEM_MC_BROADCAST_XREG_U8_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1936
1937 IEM_MC_ADVANCE_RIP_AND_FINISH();
1938 IEM_MC_END();
1939 }
1940 }
1941}
1942
1943
1944/** Opcode VEX.66.0F38 0x79. */
1945FNIEMOP_DEF(iemOp_vpbroadcastw_Vx_Wx)
1946{
1947 IEMOP_MNEMONIC2(VEX_RM, VPBROADCASTW, vpbroadcastw, Vx, Wx, DISOPTYPE_HARMLESS, 0);
1948 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1949 if (IEM_IS_MODRM_REG_MODE(bRm))
1950 {
1951 /*
1952 * Register, register.
1953 */
1954 if (pVCpu->iem.s.uVexLength)
1955 {
1956 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1957 IEM_MC_LOCAL(uint16_t, uSrc);
1958
1959 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1960 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1961 IEM_MC_PREPARE_AVX_USAGE();
1962
1963 IEM_MC_FETCH_XREG_U16(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
1964 IEM_MC_BROADCAST_YREG_U16_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1965
1966 IEM_MC_ADVANCE_RIP_AND_FINISH();
1967 IEM_MC_END();
1968 }
1969 else
1970 {
1971 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1972 IEM_MC_LOCAL(uint16_t, uSrc);
1973
1974 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1975 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1976 IEM_MC_PREPARE_AVX_USAGE();
1977 IEM_MC_FETCH_XREG_U16(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
1978 IEM_MC_BROADCAST_XREG_U16_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1979
1980 IEM_MC_ADVANCE_RIP_AND_FINISH();
1981 IEM_MC_END();
1982 }
1983 }
1984 else
1985 {
1986 /*
1987 * Register, memory.
1988 */
1989 if (pVCpu->iem.s.uVexLength)
1990 {
1991 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1992 IEM_MC_LOCAL(uint16_t, uSrc);
1993 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1994
1995 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1996 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
1997 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1998 IEM_MC_PREPARE_AVX_USAGE();
1999
2000 IEM_MC_FETCH_MEM_U16(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2001 IEM_MC_BROADCAST_YREG_U16_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2002
2003 IEM_MC_ADVANCE_RIP_AND_FINISH();
2004 IEM_MC_END();
2005 }
2006 else
2007 {
2008 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2009 IEM_MC_LOCAL(uint16_t, uSrc);
2010 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2011
2012 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2013 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
2014 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2015 IEM_MC_PREPARE_AVX_USAGE();
2016
2017 IEM_MC_FETCH_MEM_U16(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2018 IEM_MC_BROADCAST_XREG_U16_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2019
2020 IEM_MC_ADVANCE_RIP_AND_FINISH();
2021 IEM_MC_END();
2022 }
2023 }
2024}
2025
2026
2027/* Opcode VEX.66.0F38 0x7a - invalid. */
2028/* Opcode VEX.66.0F38 0x7b - invalid. */
2029/* Opcode VEX.66.0F38 0x7c - invalid. */
2030/* Opcode VEX.66.0F38 0x7d - invalid. */
2031/* Opcode VEX.66.0F38 0x7e - invalid. */
2032/* Opcode VEX.66.0F38 0x7f - invalid. */
2033
2034/* Opcode VEX.66.0F38 0x80 - invalid (legacy only). */
2035/* Opcode VEX.66.0F38 0x81 - invalid (legacy only). */
2036/* Opcode VEX.66.0F38 0x82 - invalid (legacy only). */
2037/* Opcode VEX.66.0F38 0x83 - invalid. */
2038/* Opcode VEX.66.0F38 0x84 - invalid. */
2039/* Opcode VEX.66.0F38 0x85 - invalid. */
2040/* Opcode VEX.66.0F38 0x86 - invalid. */
2041/* Opcode VEX.66.0F38 0x87 - invalid. */
2042/* Opcode VEX.66.0F38 0x88 - invalid. */
2043/* Opcode VEX.66.0F38 0x89 - invalid. */
2044/* Opcode VEX.66.0F38 0x8a - invalid. */
2045/* Opcode VEX.66.0F38 0x8b - invalid. */
2046
2047
2048/** Opcode VEX.66.0F38 0x8c. */
2049FNIEMOP_DEF(iemOp_vpmaskmovd_q_Vx_Hx_Mx)
2050{
2051 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2052 if (!IEM_IS_MODRM_REG_MODE(bRm))
2053 {
2054 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2055 {
2056 // IEMOP_MNEMONIC3(RM, VPMASKMOVQ, vpmaskmovq, Vx, Hx, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
2057 if (pVCpu->iem.s.uVexLength)
2058 {
2059 /*
2060 * YMM [ModRM:reg], YMM [vvvv], memory [ModRM:r/m]
2061 */
2062 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2063 IEM_MC_ARG_CONST(uint8_t, iYRegDst, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 0);
2064 IEM_MC_ARG_CONST(uint8_t, iYRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 1);
2065 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 3);
2066 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2067 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 2);
2068 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2069
2070 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2071 IEM_MC_PREPARE_AVX_USAGE();
2072
2073 IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vpmaskmovq_load_u256, iYRegDst, iYRegMsk, iEffSeg, GCPtrEffSrc);
2074
2075 IEM_MC_END();
2076 }
2077 else
2078 {
2079 /*
2080 * XMM [ModRM:reg], XMM [vvvv], memory [ModRM:r/m]
2081 */
2082 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2083 IEM_MC_ARG_CONST(uint8_t, iXRegDst, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 0);
2084 IEM_MC_ARG_CONST(uint8_t, iXRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 1);
2085 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 3);
2086 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2087 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 2);
2088 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2089
2090 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2091 IEM_MC_PREPARE_AVX_USAGE();
2092
2093 IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vpmaskmovq_load_u128, iXRegDst, iXRegMsk, iEffSeg, GCPtrEffSrc);
2094
2095 IEM_MC_END();
2096 }
2097 }
2098 else
2099 {
2100 // IEMOP_MNEMONIC3(RM, VPMASKMOVD, vpmaskmovd, Vx, Hx, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
2101 if (pVCpu->iem.s.uVexLength)
2102 {
2103 /*
2104 * YMM [ModRM:reg], YMM [vvvv], memory [ModRM:r/m]
2105 */
2106 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2107 IEM_MC_ARG_CONST(uint8_t, iYRegDst, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 0);
2108 IEM_MC_ARG_CONST(uint8_t, iYRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 1);
2109 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 3);
2110 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2111 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 2);
2112 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2113
2114 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2115 IEM_MC_PREPARE_AVX_USAGE();
2116
2117 IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vpmaskmovd_load_u256, iYRegDst, iYRegMsk, iEffSeg, GCPtrEffSrc);
2118
2119 IEM_MC_END();
2120 }
2121 else
2122 {
2123 /*
2124 * XMM [ModRM:reg], XMM [vvvv], memory [ModRM:r/m]
2125 */
2126 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2127 IEM_MC_ARG_CONST(uint8_t, iXRegDst, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 0);
2128 IEM_MC_ARG_CONST(uint8_t, iXRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 1);
2129 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 3);
2130 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2131 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 2);
2132 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2133
2134 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2135 IEM_MC_PREPARE_AVX_USAGE();
2136
2137 IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vpmaskmovd_load_u128, iXRegDst, iXRegMsk, iEffSeg, GCPtrEffSrc);
2138
2139 IEM_MC_END();
2140 }
2141 }
2142 }
2143 else
2144 {
2145 /* The register, register encoding is invalid. */
2146 IEMOP_RAISE_INVALID_OPCODE_RET();
2147 }
2148}
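
/*
 * Illustrative per-lane reference for the VPMASKMOVD load form above (a
 * sketch only; RefMaskMovLoadU32 is a hypothetical helper). A lane is loaded
 * when the most significant bit of the corresponding mask lane is set and
 * zeroed otherwise. Masked-out lanes must not fault, which is why the work
 * is delegated to iemCImpl_vpmaskmov*_load_* rather than done with a plain
 * memory fetch:
 *
 *     static void RefMaskMovLoadU32(uint32_t *pauDst, uint32_t const *pauMsk,
 *                                   uint32_t const *pauMem, unsigned cLanes)
 *     {
 *         for (unsigned i = 0; i < cLanes; i++)
 *             pauDst[i] = (int32_t)pauMsk[i] < 0 ? pauMem[i] : 0; // mask lane MSB selects
 *     }
 */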
2149
2150
2151/* Opcode VEX.66.0F38 0x8d - invalid. */

2153
2154
2155/** Opcode VEX.66.0F38 0x8e. */
2156FNIEMOP_DEF(iemOp_vpmaskmovd_q_Mx_Vx_Hx)
2157{
2158 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2159 if (!IEM_IS_MODRM_REG_MODE(bRm))
2160 {
2161 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2162 {
2163 // IEMOP_MNEMONIC3(RM, VPMASKMOVQ, vpmaskmovq, Mx, Hx, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
2164 if (pVCpu->iem.s.uVexLength)
2165 {
2166 /*
2167 * memory [ModRM:r/m], YMM [vvvv], YMM [ModRM:reg]
2168 */
2169 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2170
2171 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
2172 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
2173 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
2174 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2175 IEM_MC_ARG_CONST(uint8_t, iYRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 2);
2176 IEM_MC_ARG_CONST(uint8_t, iYRegSrc, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 3);
2177
2178 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2179 IEM_MC_PREPARE_AVX_USAGE();
2180
2181 IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vpmaskmovq_store_u256, iEffSeg, GCPtrEffDst, iYRegMsk, iYRegSrc);
2182
2183 IEM_MC_END();
2184 }
2185 else
2186 {
2187 /*
2188 * memory [ModRM:r/m], XMM [vvvv], XMM [ModRM:reg]
2189 */
2190 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2191
2192 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
2193 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
2194 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
2195 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2196 IEM_MC_ARG_CONST(uint8_t, iXRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 2);
2197 IEM_MC_ARG_CONST(uint8_t, iXRegSrc, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 3);
2198
2199 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2200 IEM_MC_PREPARE_AVX_USAGE();
2201
2202 IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vpmaskmovq_store_u128, iEffSeg, GCPtrEffDst, iXRegMsk, iXRegSrc);
2203
2204 IEM_MC_END();
2205 }
2206 }
2207 else
2208 {
2209 // IEMOP_MNEMONIC3(RM, VPMASKMOVD, vpmaskmovd, Mx, Hx, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
2210 if (pVCpu->iem.s.uVexLength)
2211 {
2212 /*
2213 * memory [ModRM:r/m], YMM [vvvv], YMM [ModRM:reg]
2214 */
2215 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2216
2217 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
2218 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
2219 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
2220 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2221 IEM_MC_ARG_CONST(uint8_t, iYRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 2);
2222 IEM_MC_ARG_CONST(uint8_t, iYRegSrc, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 3);
2223
2224 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2225 IEM_MC_PREPARE_AVX_USAGE();
2226
2227 IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vpmaskmovd_store_u256, iEffSeg, GCPtrEffDst, iYRegMsk, iYRegSrc);
2228
2229 IEM_MC_END();
2230 }
2231 else
2232 {
2233 /*
2234 * memory [ModRM:r/m], XMM [vvvv], XMM [ModRM:reg]
2235 */
2236 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2237
2238 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
2239 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
2240 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
2241 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2242 IEM_MC_ARG_CONST(uint8_t, iXRegMsk, /*=*/ IEM_GET_EFFECTIVE_VVVV(pVCpu), 2);
2243 IEM_MC_ARG_CONST(uint8_t, iXRegSrc, /*=*/ IEM_GET_MODRM_REG(pVCpu, bRm), 3);
2244
2245 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2246 IEM_MC_PREPARE_AVX_USAGE();
2247
2248 IEM_MC_CALL_CIMPL_4(0, 0, iemCImpl_vpmaskmovd_store_u128, iEffSeg, GCPtrEffDst, iXRegMsk, iXRegSrc);
2249
2250 IEM_MC_END();
2251 }
2252 }
2253 }
2254 else
2255 {
2256 /* The register, register encoding is invalid. */
2257 IEMOP_RAISE_INVALID_OPCODE_RET();
2258 }
2259}
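
/*
 * Illustrative per-lane reference for the VPMASKMOVD store form above (a
 * sketch only; RefMaskMovStoreU32 is a hypothetical helper). Only lanes whose
 * mask MSB is set are written; the rest of the memory operand is left
 * untouched and must not fault:
 *
 *     static void RefMaskMovStoreU32(uint32_t *pauMem, uint32_t const *pauMsk,
 *                                    uint32_t const *pauSrc, unsigned cLanes)
 *     {
 *         for (unsigned i = 0; i < cLanes; i++)
 *             if ((int32_t)pauMsk[i] < 0)
 *                 pauMem[i] = pauSrc[i];
 *     }
 */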
2260
2261
2262/* Opcode VEX.66.0F38 0x8f - invalid. */
2263
2264/** Opcode VEX.66.0F38 0x90 (vex only). */
2265FNIEMOP_STUB(iemOp_vpgatherdd_q_Vx_Hx_Wx);
2266/** Opcode VEX.66.0F38 0x91 (vex only). */
2267FNIEMOP_STUB(iemOp_vpgatherqd_q_Vx_Hx_Wx);
2268/** Opcode VEX.66.0F38 0x92 (vex only). */
2269FNIEMOP_STUB(iemOp_vgatherdps_d_Vx_Hx_Wx);
2270/** Opcode VEX.66.0F38 0x93 (vex only). */
2271FNIEMOP_STUB(iemOp_vgatherqps_d_Vx_Hx_Wx);
2272/* Opcode VEX.66.0F38 0x94 - invalid. */
2273/* Opcode VEX.66.0F38 0x95 - invalid. */
2274/** Opcode VEX.66.0F38 0x96 (vex only). */
2275FNIEMOP_STUB(iemOp_vfmaddsub132ps_d_Vx_Hx_Wx);
2276/** Opcode VEX.66.0F38 0x97 (vex only). */
2277FNIEMOP_STUB(iemOp_vfmsubadd132ps_d_Vx_Hx_Wx);
2278/** Opcode VEX.66.0F38 0x98 (vex only). */
2279FNIEMOP_STUB(iemOp_vfmadd132ps_d_Vx_Hx_Wx);
2280/** Opcode VEX.66.0F38 0x99 (vex only). */
2281FNIEMOP_STUB(iemOp_vfmadd132ss_d_Vx_Hx_Wx);
2282/** Opcode VEX.66.0F38 0x9a (vex only). */
2283FNIEMOP_STUB(iemOp_vfmsub132ps_d_Vx_Hx_Wx);
2284/** Opcode VEX.66.0F38 0x9b (vex only). */
2285FNIEMOP_STUB(iemOp_vfmsub132ss_d_Vx_Hx_Wx);
2286/** Opcode VEX.66.0F38 0x9c (vex only). */
2287FNIEMOP_STUB(iemOp_vfnmadd132ps_d_Vx_Hx_Wx);
2288/** Opcode VEX.66.0F38 0x9d (vex only). */
2289FNIEMOP_STUB(iemOp_vfnmadd132ss_d_Vx_Hx_Wx);
2290/** Opcode VEX.66.0F38 0x9e (vex only). */
2291FNIEMOP_STUB(iemOp_vfnmsub132ps_d_Vx_Hx_Wx);
2292/** Opcode VEX.66.0F38 0x9f (vex only). */
2293FNIEMOP_STUB(iemOp_vfnmsub132ss_d_Vx_Hx_Wx);
2294
2295/* Opcode VEX.66.0F38 0xa0 - invalid. */
2296/* Opcode VEX.66.0F38 0xa1 - invalid. */
2297/* Opcode VEX.66.0F38 0xa2 - invalid. */
2298/* Opcode VEX.66.0F38 0xa3 - invalid. */
2299/* Opcode VEX.66.0F38 0xa4 - invalid. */
2300/* Opcode VEX.66.0F38 0xa5 - invalid. */
2301/** Opcode VEX.66.0F38 0xa6 (vex only). */
2302FNIEMOP_STUB(iemOp_vfmaddsub213ps_d_Vx_Hx_Wx);
2303/** Opcode VEX.66.0F38 0xa7 (vex only). */
2304FNIEMOP_STUB(iemOp_vfmsubadd213ps_d_Vx_Hx_Wx);
2305/** Opcode VEX.66.0F38 0xa8 (vex only). */
2306FNIEMOP_STUB(iemOp_vfmadd213ps_d_Vx_Hx_Wx);
2307/** Opcode VEX.66.0F38 0xa9 (vex only). */
2308FNIEMOP_STUB(iemOp_vfmadd213ss_d_Vx_Hx_Wx);
2309/** Opcode VEX.66.0F38 0xaa (vex only). */
2310FNIEMOP_STUB(iemOp_vfmsub213ps_d_Vx_Hx_Wx);
2311/** Opcode VEX.66.0F38 0xab (vex only). */
2312FNIEMOP_STUB(iemOp_vfmsub213ss_d_Vx_Hx_Wx);
2313/** Opcode VEX.66.0F38 0xac (vex only). */
2314FNIEMOP_STUB(iemOp_vfnmadd213ps_d_Vx_Hx_Wx);
2315/** Opcode VEX.66.0F38 0xad (vex only). */
2316FNIEMOP_STUB(iemOp_vfnmadd213ss_d_Vx_Hx_Wx);
2317/** Opcode VEX.66.0F38 0xae (vex only). */
2318FNIEMOP_STUB(iemOp_vfnmsub213ps_d_Vx_Hx_Wx);
2319/** Opcode VEX.66.0F38 0xaf (vex only). */
2320FNIEMOP_STUB(iemOp_vfnmsub213ss_d_Vx_Hx_Wx);
2321
2322/* Opcode VEX.66.0F38 0xb0 - invalid. */
2323/* Opcode VEX.66.0F38 0xb1 - invalid. */
2324/* Opcode VEX.66.0F38 0xb2 - invalid. */
2325/* Opcode VEX.66.0F38 0xb3 - invalid. */
2326/* Opcode VEX.66.0F38 0xb4 - invalid. */
2327/* Opcode VEX.66.0F38 0xb5 - invalid. */
2328/** Opcode VEX.66.0F38 0xb6 (vex only). */
2329FNIEMOP_STUB(iemOp_vfmaddsub231ps_d_Vx_Hx_Wx);
2330/** Opcode VEX.66.0F38 0xb7 (vex only). */
2331FNIEMOP_STUB(iemOp_vfmsubadd231ps_d_Vx_Hx_Wx);
2332/** Opcode VEX.66.0F38 0xb8 (vex only). */
2333FNIEMOP_STUB(iemOp_vfmadd231ps_d_Vx_Hx_Wx);
2334/** Opcode VEX.66.0F38 0xb9 (vex only). */
2335FNIEMOP_STUB(iemOp_vfmadd231ss_d_Vx_Hx_Wx);
2336/** Opcode VEX.66.0F38 0xba (vex only). */
2337FNIEMOP_STUB(iemOp_vfmsub231ps_d_Vx_Hx_Wx);
2338/** Opcode VEX.66.0F38 0xbb (vex only). */
2339FNIEMOP_STUB(iemOp_vfmsub231ss_d_Vx_Hx_Wx);
2340/** Opcode VEX.66.0F38 0xbc (vex only). */
2341FNIEMOP_STUB(iemOp_vfnmadd231ps_d_Vx_Hx_Wx);
2342/** Opcode VEX.66.0F38 0xbd (vex only). */
2343FNIEMOP_STUB(iemOp_vfnmadd231ss_d_Vx_Hx_Wx);
2344/** Opcode VEX.66.0F38 0xbe (vex only). */
2345FNIEMOP_STUB(iemOp_vfnmsub231ps_d_Vx_Hx_Wx);
2346/** Opcode VEX.66.0F38 0xbf (vex only). */
2347FNIEMOP_STUB(iemOp_vfnmsub231ss_d_Vx_Hx_Wx);
2348
2349/* Opcode VEX.0F38 0xc0 - invalid. */
2350/* Opcode VEX.66.0F38 0xc0 - invalid. */
2351/* Opcode VEX.0F38 0xc1 - invalid. */
2352/* Opcode VEX.66.0F38 0xc1 - invalid. */
2353/* Opcode VEX.0F38 0xc2 - invalid. */
2354/* Opcode VEX.66.0F38 0xc2 - invalid. */
2355/* Opcode VEX.0F38 0xc3 - invalid. */
2356/* Opcode VEX.66.0F38 0xc3 - invalid. */
2357/* Opcode VEX.0F38 0xc4 - invalid. */
2358/* Opcode VEX.66.0F38 0xc4 - invalid. */
2359/* Opcode VEX.0F38 0xc5 - invalid. */
2360/* Opcode VEX.66.0F38 0xc5 - invalid. */
2361/* Opcode VEX.0F38 0xc6 - invalid. */
2362/* Opcode VEX.66.0F38 0xc6 - invalid. */
2363/* Opcode VEX.0F38 0xc7 - invalid. */
2364/* Opcode VEX.66.0F38 0xc7 - invalid. */
2365/* Opcode VEX.0F38 0xc8 - invalid. */
2366/* Opcode VEX.66.0F38 0xc8 - invalid. */
2367/* Opcode VEX.0F38 0xc9 - invalid. */
2368/* Opcode VEX.66.0F38 0xc9 - invalid. */
2369 /* Opcode VEX.0F38 0xca - invalid. */
2370/* Opcode VEX.66.0F38 0xca - invalid. */
2371/* Opcode VEX.0F38 0xcb - invalid. */
2372/* Opcode VEX.66.0F38 0xcb - invalid. */
2373/* Opcode VEX.0F38 0xcc - invalid. */
2374/* Opcode VEX.66.0F38 0xcc - invalid. */
2375/* Opcode VEX.0F38 0xcd - invalid. */
2376/* Opcode VEX.66.0F38 0xcd - invalid. */
2377/* Opcode VEX.0F38 0xce - invalid. */
2378/* Opcode VEX.66.0F38 0xce - invalid. */
2379/* Opcode VEX.0F38 0xcf - invalid. */
2380/* Opcode VEX.66.0F38 0xcf - invalid. */
2381
2382/* Opcode VEX.66.0F38 0xd0 - invalid. */
2383/* Opcode VEX.66.0F38 0xd1 - invalid. */
2384/* Opcode VEX.66.0F38 0xd2 - invalid. */
2385/* Opcode VEX.66.0F38 0xd3 - invalid. */
2386/* Opcode VEX.66.0F38 0xd4 - invalid. */
2387/* Opcode VEX.66.0F38 0xd5 - invalid. */
2388/* Opcode VEX.66.0F38 0xd6 - invalid. */
2389/* Opcode VEX.66.0F38 0xd7 - invalid. */
2390/* Opcode VEX.66.0F38 0xd8 - invalid. */
2391/* Opcode VEX.66.0F38 0xd9 - invalid. */
2392/* Opcode VEX.66.0F38 0xda - invalid. */
2393
2394
2395/** Opcode VEX.66.0F38 0xdb. */
2396FNIEMOP_DEF(iemOp_vaesimc_Vdq_Wdq)
2397{
2398 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2399 if (IEM_IS_MODRM_REG_MODE(bRm))
2400 {
2401 /*
2402 * Register, register.
2403 */
2404 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2405 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX_2(fAvx, fAesNi);
2406 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2407 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2408 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2409 IEM_MC_PREPARE_AVX_USAGE();
2410 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2411 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2412 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAesNi, iemAImpl_vaesimc_u128, iemAImpl_vaesimc_u128_fallback), puDst, puSrc);
2413 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2414 IEM_MC_ADVANCE_RIP_AND_FINISH();
2415 IEM_MC_END();
2416 }
2417 else
2418 {
2419 /*
2420 * Register, memory.
2421 */
2422 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2423 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2424 IEM_MC_LOCAL(RTUINT128U, uSrc);
2425 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
2426 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2427
2428 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2429 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX_2(fAvx, fAesNi);
2430 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2431 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2432
2433 IEM_MC_PREPARE_AVX_USAGE();
2434 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2435 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAesNi, iemAImpl_vaesimc_u128, iemAImpl_vaesimc_u128_fallback), puDst, puSrc);
2436 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2437 IEM_MC_ADVANCE_RIP_AND_FINISH();
2438 IEM_MC_END();
2439 }
2440}
2441
2442
2443/** Opcode VEX.66.0F38 0xdc. */
2444FNIEMOP_DEF(iemOp_vaesenc_Vdq_Wdq)
2445{
2446 IEMOP_MNEMONIC3(VEX_RVM, VAESENC, vaesenc, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2447 return FNIEMOP_CALL_1(iemOpCommonAvxAesNi_Vx_Hx_Wx,
2448 IEM_SELECT_HOST_OR_FALLBACK(fAesNi, iemAImpl_vaesenc_u128, iemAImpl_vaesenc_u128_fallback)); /* ASSUMES fAesNi on the host implies fAvx. */
2449}
2450
2451
2452/** Opcode VEX.66.0F38 0xdd. */
2453FNIEMOP_DEF(iemOp_vaesenclast_Vdq_Wdq)
2454{
2455 IEMOP_MNEMONIC3(VEX_RVM, VAESENCLAST, vaesenclast, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2456 return FNIEMOP_CALL_1(iemOpCommonAvxAesNi_Vx_Hx_Wx,
2457 IEM_SELECT_HOST_OR_FALLBACK(fAesNi, iemAImpl_vaesenclast_u128, iemAImpl_vaesenclast_u128_fallback)); /* ASSUMES fAesNi on the host implies fAvx. */
2458}
2459
2460
2461/** Opcode VEX.66.0F38 0xde. */
2462FNIEMOP_DEF(iemOp_vaesdec_Vdq_Wdq)
2463{
2464 IEMOP_MNEMONIC3(VEX_RVM, VAESDEC, vaesdec, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2465 return FNIEMOP_CALL_1(iemOpCommonAvxAesNi_Vx_Hx_Wx,
2466 IEM_SELECT_HOST_OR_FALLBACK(fAesNi, iemAImpl_vaesdec_u128, iemAImpl_vaesdec_u128_fallback)); /* ASSUMES fAesNi on the host implies fAvx. */
2467}
2468
2469
2470/** Opcode VEX.66.0F38 0xdf. */
2471FNIEMOP_DEF(iemOp_vaesdeclast_Vdq_Wdq)
2472{
2473 IEMOP_MNEMONIC3(VEX_RVM, VAESDECLAST, vaesdeclast, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2474 return FNIEMOP_CALL_1(iemOpCommonAvxAesNi_Vx_Hx_Wx,
2475 IEM_SELECT_HOST_OR_FALLBACK(fAesNi, iemAImpl_vaesdeclast_u128, iemAImpl_vaesdeclast_u128_fallback)); /* ASSUMES fAesNi on the host implies fAvx. */
2476}
2477
2478
2479/* Opcode VEX.66.0F38 0xe0 - invalid. */
2480/* Opcode VEX.66.0F38 0xe1 - invalid. */
2481/* Opcode VEX.66.0F38 0xe2 - invalid. */
2482/* Opcode VEX.66.0F38 0xe3 - invalid. */
2483/* Opcode VEX.66.0F38 0xe4 - invalid. */
2484/* Opcode VEX.66.0F38 0xe5 - invalid. */
2485/* Opcode VEX.66.0F38 0xe6 - invalid. */
2486/* Opcode VEX.66.0F38 0xe7 - invalid. */
2487/* Opcode VEX.66.0F38 0xe8 - invalid. */
2488/* Opcode VEX.66.0F38 0xe9 - invalid. */
2489/* Opcode VEX.66.0F38 0xea - invalid. */
2490/* Opcode VEX.66.0F38 0xeb - invalid. */
2491/* Opcode VEX.66.0F38 0xec - invalid. */
2492/* Opcode VEX.66.0F38 0xed - invalid. */
2493/* Opcode VEX.66.0F38 0xee - invalid. */
2494/* Opcode VEX.66.0F38 0xef - invalid. */
2495
2496
2497/* Opcode VEX.0F38 0xf0 - invalid (legacy only). */
2498/* Opcode VEX.66.0F38 0xf0 - invalid (legacy only). */
2499/* Opcode VEX.F3.0F38 0xf0 - invalid. */
2500/* Opcode VEX.F2.0F38 0xf0 - invalid (legacy only). */
2501
2502/* Opcode VEX.0F38 0xf1 - invalid (legacy only). */
2503/* Opcode VEX.66.0F38 0xf1 - invalid (legacy only). */
2504/* Opcode VEX.F3.0F38 0xf1 - invalid. */
2505/* Opcode VEX.F2.0F38 0xf1 - invalid (legacy only). */
2506
2507/**
2508 * @opcode 0xf2
2509 * @oppfx none
2510 * @opflmodify cf,pf,af,zf,sf,of
2511 * @opflclear cf,of
2512 * @opflundef pf,af
2513 * @note VEX only
2514 */
2515FNIEMOP_DEF(iemOp_andn_Gy_By_Ey)
2516{
2517 IEMOP_MNEMONIC3(VEX_RVM, ANDN, andn, Gy, By, Ey, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
2518 IEMOP_HLP_IGNORE_VEX_W_PREFIX_IF_NOT_IN_64BIT();
2519 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_PF);
2520 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2521 if (IEM_IS_MODRM_REG_MODE(bRm))
2522 {
2523 /*
2524 * Register, register.
2525 */
2526 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2527 {
2528 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2529 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi1);
2530 IEM_MC_ARG(uint64_t *, pDst, 0);
2531 IEM_MC_ARG(uint64_t, uSrc1, 1);
2532 IEM_MC_ARG(uint64_t, uSrc2, 2);
2533 IEM_MC_ARG(uint32_t *, pEFlags, 3);
2534 IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2535 IEM_MC_FETCH_GREG_U64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2536 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2537 IEM_MC_REF_EFLAGS(pEFlags);
2538 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_andn_u64, iemAImpl_andn_u64_fallback),
2539 pDst, uSrc1, uSrc2, pEFlags);
2540 IEM_MC_ADVANCE_RIP_AND_FINISH();
2541 IEM_MC_END();
2542 }
2543 else
2544 {
2545 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2546 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi1);
2547 IEM_MC_ARG(uint32_t *, pDst, 0);
2548 IEM_MC_ARG(uint32_t, uSrc1, 1);
2549 IEM_MC_ARG(uint32_t, uSrc2, 2);
2550 IEM_MC_ARG(uint32_t *, pEFlags, 3);
2551 IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2552 IEM_MC_FETCH_GREG_U32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2553 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2554 IEM_MC_REF_EFLAGS(pEFlags);
2555 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_andn_u32, iemAImpl_andn_u32_fallback),
2556 pDst, uSrc1, uSrc2, pEFlags);
2557 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
2558 IEM_MC_ADVANCE_RIP_AND_FINISH();
2559 IEM_MC_END();
2560 }
2561 }
2562 else
2563 {
2564 /*
2565 * Register, memory.
2566 */
2567 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2568 {
2569 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2570 IEM_MC_ARG(uint64_t *, pDst, 0);
2571 IEM_MC_ARG(uint64_t, uSrc1, 1);
2572 IEM_MC_ARG(uint64_t, uSrc2, 2);
2573 IEM_MC_ARG(uint32_t *, pEFlags, 3);
2574 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2575 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2576 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi1);
2577 IEM_MC_FETCH_MEM_U64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2578 IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2579 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2580 IEM_MC_REF_EFLAGS(pEFlags);
2581 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_andn_u64, iemAImpl_andn_u64_fallback),
2582 pDst, uSrc1, uSrc2, pEFlags);
2583 IEM_MC_ADVANCE_RIP_AND_FINISH();
2584 IEM_MC_END();
2585 }
2586 else
2587 {
2588 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2589 IEM_MC_ARG(uint32_t *, pDst, 0);
2590 IEM_MC_ARG(uint32_t, uSrc1, 1);
2591 IEM_MC_ARG(uint32_t, uSrc2, 2);
2592 IEM_MC_ARG(uint32_t *, pEFlags, 3);
2593 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2594 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2595 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi1);
2596 IEM_MC_FETCH_MEM_U32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2597 IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2598 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2599 IEM_MC_REF_EFLAGS(pEFlags);
2600 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_andn_u32, iemAImpl_andn_u32_fallback),
2601 pDst, uSrc1, uSrc2, pEFlags);
2602 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
2603 IEM_MC_ADVANCE_RIP_AND_FINISH();
2604 IEM_MC_END();
2605 }
2606 }
2607}
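
/*
 * Illustrative reference for ANDN (a sketch only; RefAndn64 is a hypothetical
 * helper). The first source comes from VEX.vvvv and is inverted, the second
 * from r/m; SF and ZF follow the result while CF and OF are cleared:
 *
 *     static inline uint64_t RefAndn64(uint64_t uSrc1, uint64_t uSrc2)
 *     {
 *         return ~uSrc1 & uSrc2;
 *     }
 */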
2608
2609/* Opcode VEX.66.0F38 0xf2 - invalid. */
2610/* Opcode VEX.F3.0F38 0xf2 - invalid. */
2611/* Opcode VEX.F2.0F38 0xf2 - invalid. */
2612
2613
2614/* Opcode VEX.0F38 0xf3 - invalid. */
2615/* Opcode VEX.66.0F38 0xf3 - invalid. */
2616
2617/* Opcode VEX.F3.0F38 0xf3 /0 - invalid. */
2618
2619/** Body for the vex group 17 instructions. */
2620#define IEMOP_BODY_By_Ey(a_Instr) \
2621 IEMOP_HLP_IGNORE_VEX_W_PREFIX_IF_NOT_IN_64BIT(); \
2622 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_PF); \
2623 if (IEM_IS_MODRM_REG_MODE(bRm)) \
2624 { \
2625 /* \
2626 * Register, register. \
2627 */ \
2628 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
2629 { \
2630 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
2631 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi1); \
2632 IEM_MC_ARG(uint64_t, uSrc, 2); \
2633 IEM_MC_FETCH_GREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm)); \
2634 IEM_MC_ARG(uint64_t *, pDst, 1); \
2635 IEM_MC_REF_GREG_U64(pDst, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2636 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
2637 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, \
2638 IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_ ## a_Instr ## _u64, \
2639 iemAImpl_ ## a_Instr ## _u64_fallback), fEFlagsIn, pDst, uSrc); \
2640 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
2641 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2642 IEM_MC_END(); \
2643 } \
2644 else \
2645 { \
2646 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
2647 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi1); \
2648 IEM_MC_ARG(uint32_t, uSrc, 2); \
2649 IEM_MC_FETCH_GREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm)); \
2650 IEM_MC_ARG(uint32_t *, pDst, 1); \
2651 IEM_MC_REF_GREG_U32(pDst, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2652 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
2653 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, \
2654 IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_ ## a_Instr ## _u32, \
2655 iemAImpl_ ## a_Instr ## _u32_fallback), fEFlagsIn, pDst, uSrc); \
2656 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2657 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
2658 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2659 IEM_MC_END(); \
2660 } \
2661 } \
2662 else \
2663 { \
2664 /* \
2665 * Register, memory. \
2666 */ \
2667 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
2668 { \
2669 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
2670 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2671 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2672 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi1); \
2673 \
2674 IEM_MC_ARG(uint64_t, uSrc, 2); \
2675 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2676 IEM_MC_ARG(uint64_t *, pDst, 1); \
2677 IEM_MC_REF_GREG_U64(pDst, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2678 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
2679 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, \
2680 IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_ ## a_Instr ## _u64, \
2681 iemAImpl_ ## a_Instr ## _u64_fallback), fEFlagsIn, pDst, uSrc); \
2682 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
2683 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2684 IEM_MC_END(); \
2685 } \
2686 else \
2687 { \
2688 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
2689 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2690 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2691 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi1); \
2692 \
2693 IEM_MC_ARG(uint32_t, uSrc, 2); \
2694 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2695 IEM_MC_ARG(uint32_t *, pDst, 1); \
2696 IEM_MC_REF_GREG_U32(pDst, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2697 IEM_MC_ARG_EFLAGS( fEFlagsIn, 0); \
2698 IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, \
2699 IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_ ## a_Instr ## _u32, \
2700 iemAImpl_ ## a_Instr ## _u32_fallback), fEFlagsIn, pDst, uSrc); \
2701 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2702 IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
2703 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2704 IEM_MC_END(); \
2705 } \
2706 } \
2707 (void)0
2708
2709
2710/**
2711 * @opmaps vexgrp17
2712 * @opcode /1
2713 * @opflmodify cf,pf,af,zf,sf,of
2714 * @opflclear of
2715 * @opflundef pf,af
2716 */
2717FNIEMOP_DEF_1(iemOp_VGrp17_blsr_By_Ey, uint8_t, bRm)
2718{
2719 IEMOP_MNEMONIC2(VEX_VM, BLSR, blsr, By, Ey, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
2720 IEMOP_BODY_By_Ey(blsr);
2721}
2722
2723
2724/**
2725 * @opmaps vexgrp17
2726 * @opcode /2
2727 * @opflmodify cf,pf,af,zf,sf,of
2728 * @opflclear zf,of
2729 * @opflundef pf,af
2730 */
2731FNIEMOP_DEF_1(iemOp_VGrp17_blsmsk_By_Ey, uint8_t, bRm)
2732{
2733 IEMOP_MNEMONIC2(VEX_VM, BLSMSK, blsmsk, By, Ey, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
2734 IEMOP_BODY_By_Ey(blsmsk);
2735}
2736
2737
2738/**
2739 * @opmaps vexgrp17
2740 * @opcode /3
2741 * @opflmodify cf,pf,af,zf,sf,of
2742 * @opflclear of
2743 * @opflundef pf,af
2744 */
2745FNIEMOP_DEF_1(iemOp_VGrp17_blsi_By_Ey, uint8_t, bRm)
2746{
2747 IEMOP_MNEMONIC2(VEX_VM, BLSI, blsi, By, Ey, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
2748 IEMOP_BODY_By_Ey(blsi);
2749}
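
/*
 * Illustrative reference for the three group 17 instructions above (a sketch
 * only; the RefBlsX64 helpers are hypothetical). All derive from the two's
 * complement fact that subtracting 1 flips the lowest set bit and everything
 * below it:
 *
 *     static inline uint64_t RefBlsr64(uint64_t uSrc)   { return uSrc & (uSrc - 1); } // clear lowest set bit
 *     static inline uint64_t RefBlsmsk64(uint64_t uSrc) { return uSrc ^ (uSrc - 1); } // mask thru lowest set bit
 *     static inline uint64_t RefBlsi64(uint64_t uSrc)   { return uSrc & (0 - uSrc); } // isolate lowest set bit
 */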
2750
2751
2752/* Opcode VEX.F3.0F38 0xf3 /4 - invalid. */
2753/* Opcode VEX.F3.0F38 0xf3 /5 - invalid. */
2754/* Opcode VEX.F3.0F38 0xf3 /6 - invalid. */
2755/* Opcode VEX.F3.0F38 0xf3 /7 - invalid. */
2756
2757/**
2758 * Group 17 jump table for the VEX.F3 variant.
2759 */
2760IEM_STATIC const PFNIEMOPRM g_apfnVexGroup17_f3[] =
2761{
2762 /* /0 */ iemOp_InvalidWithRM,
2763 /* /1 */ iemOp_VGrp17_blsr_By_Ey,
2764 /* /2 */ iemOp_VGrp17_blsmsk_By_Ey,
2765 /* /3 */ iemOp_VGrp17_blsi_By_Ey,
2766 /* /4 */ iemOp_InvalidWithRM,
2767 /* /5 */ iemOp_InvalidWithRM,
2768 /* /6 */ iemOp_InvalidWithRM,
2769 /* /7 */ iemOp_InvalidWithRM
2770};
2771AssertCompile(RT_ELEMENTS(g_apfnVexGroup17_f3) == 8);
2772
2773/** Opcode VEX.F3.0F38 0xf3 - invalid (vex only - group 17). */
2774FNIEMOP_DEF(iemOp_VGrp17_f3)
2775{
2776 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2777 return FNIEMOP_CALL_1(g_apfnVexGroup17_f3[IEM_GET_MODRM_REG_8(bRm)], bRm);
2778}
2779
2780/* Opcode VEX.F2.0F38 0xf3 - invalid (vex only - group 17). */
2781
2782
2783/* Opcode VEX.0F38 0xf4 - invalid. */
2784/* Opcode VEX.66.0F38 0xf4 - invalid. */
2785/* Opcode VEX.F3.0F38 0xf4 - invalid. */
2786/* Opcode VEX.F2.0F38 0xf4 - invalid. */
2787
2788/** Body for BZHI, BEXTR, ++; assumes VEX.L must be 0. */
2789#define IEMOP_BODY_Gy_Ey_By(a_Instr, a_fFeatureMember, a_fUndefFlags) \
2790 IEMOP_HLP_IGNORE_VEX_W_PREFIX_IF_NOT_IN_64BIT(); \
2791 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(a_fUndefFlags); \
2792 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2793 if (IEM_IS_MODRM_REG_MODE(bRm)) \
2794 { \
2795 /* \
2796 * Register, register. \
2797 */ \
2798 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
2799 { \
2800 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
2801 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
2802 IEM_MC_ARG(uint64_t *, pDst, 0); \
2803 IEM_MC_ARG(uint64_t, uSrc1, 1); \
2804 IEM_MC_ARG(uint64_t, uSrc2, 2); \
2805 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
2806 IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm)); \
2807 IEM_MC_FETCH_GREG_U64(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2808 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
2809 IEM_MC_REF_EFLAGS(pEFlags); \
2810 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u64, \
2811 iemAImpl_ ## a_Instr ## _u64_fallback), \
2812 pDst, uSrc1, uSrc2, pEFlags); \
2813 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2814 IEM_MC_END(); \
2815 } \
2816 else \
2817 { \
2818 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
2819 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
2820 IEM_MC_ARG(uint32_t *, pDst, 0); \
2821 IEM_MC_ARG(uint32_t, uSrc1, 1); \
2822 IEM_MC_ARG(uint32_t, uSrc2, 2); \
2823 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
2824 IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm)); \
2825 IEM_MC_FETCH_GREG_U32(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2826 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
2827 IEM_MC_REF_EFLAGS(pEFlags); \
2828 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u32, \
2829 iemAImpl_ ## a_Instr ## _u32_fallback), \
2830 pDst, uSrc1, uSrc2, pEFlags); \
2831 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
2832 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2833 IEM_MC_END(); \
2834 } \
2835 } \
2836 else \
2837 { \
2838 /* \
2839 * Register, memory. \
2840 */ \
2841 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
2842 { \
2843 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
2844 IEM_MC_ARG(uint64_t *, pDst, 0); \
2845 IEM_MC_ARG(uint64_t, uSrc1, 1); \
2846 IEM_MC_ARG(uint64_t, uSrc2, 2); \
2847 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
2848 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2849 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2850 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
2851 IEM_MC_FETCH_MEM_U64(uSrc1, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2852 IEM_MC_FETCH_GREG_U64(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2853 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
2854 IEM_MC_REF_EFLAGS(pEFlags); \
2855 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u64, \
2856 iemAImpl_ ## a_Instr ## _u64_fallback), \
2857 pDst, uSrc1, uSrc2, pEFlags); \
2858 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2859 IEM_MC_END(); \
2860 } \
2861 else \
2862 { \
2863 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
2864 IEM_MC_ARG(uint32_t *, pDst, 0); \
2865 IEM_MC_ARG(uint32_t, uSrc1, 1); \
2866 IEM_MC_ARG(uint32_t, uSrc2, 2); \
2867 IEM_MC_ARG(uint32_t *, pEFlags, 3); \
2868 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2869 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2870 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
2871 IEM_MC_FETCH_MEM_U32(uSrc1, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2872 IEM_MC_FETCH_GREG_U32(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2873 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
2874 IEM_MC_REF_EFLAGS(pEFlags); \
2875 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u32, \
2876 iemAImpl_ ## a_Instr ## _u32_fallback), \
2877 pDst, uSrc1, uSrc2, pEFlags); \
2878 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
2879 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2880 IEM_MC_END(); \
2881 } \
2882 } \
2883 (void)0
2884
2885/** Body for SARX, SHLX, SHRX; assumes VEX.L must be 0. */
2886#define IEMOP_BODY_Gy_Ey_By_NoEflags(a_Instr, a_fFeatureMember) \
2887 IEMOP_HLP_IGNORE_VEX_W_PREFIX_IF_NOT_IN_64BIT(); \
2888 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2889 if (IEM_IS_MODRM_REG_MODE(bRm)) \
2890 { \
2891 /* \
2892 * Register, register. \
2893 */ \
2894 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
2895 { \
2896 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
2897 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
2898 IEM_MC_ARG(uint64_t *, pDst, 0); \
2899 IEM_MC_ARG(uint64_t, uSrc1, 1); \
2900 IEM_MC_ARG(uint64_t, uSrc2, 2); \
2901 IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm)); \
2902 IEM_MC_FETCH_GREG_U64(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2903 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
2904 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u64, \
2905 iemAImpl_ ## a_Instr ## _u64_fallback), pDst, uSrc1, uSrc2); \
2906 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2907 IEM_MC_END(); \
2908 } \
2909 else \
2910 { \
2911 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
2912 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
2913 IEM_MC_ARG(uint32_t *, pDst, 0); \
2914 IEM_MC_ARG(uint32_t, uSrc1, 1); \
2915 IEM_MC_ARG(uint32_t, uSrc2, 2); \
2916 IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm)); \
2917 IEM_MC_FETCH_GREG_U32(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2918 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
2919 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u32, \
2920 iemAImpl_ ## a_Instr ## _u32_fallback), pDst, uSrc1, uSrc2); \
2921 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
2922 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2923 IEM_MC_END(); \
2924 } \
2925 } \
2926 else \
2927 { \
2928 /* \
2929 * Register, memory. \
2930 */ \
2931 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
2932 { \
2933 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
2934 IEM_MC_ARG(uint64_t *, pDst, 0); \
2935 IEM_MC_ARG(uint64_t, uSrc1, 1); \
2936 IEM_MC_ARG(uint64_t, uSrc2, 2); \
2937 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2938 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2939 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
2940 IEM_MC_FETCH_MEM_U64(uSrc1, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2941 IEM_MC_FETCH_GREG_U64(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2942 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
2943 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u64, \
2944 iemAImpl_ ## a_Instr ## _u64_fallback), pDst, uSrc1, uSrc2); \
2945 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2946 IEM_MC_END(); \
2947 } \
2948 else \
2949 { \
2950 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
2951 IEM_MC_ARG(uint32_t *, pDst, 0); \
2952 IEM_MC_ARG(uint32_t, uSrc1, 1); \
2953 IEM_MC_ARG(uint32_t, uSrc2, 2); \
2954 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2955 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2956 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
2957 IEM_MC_FETCH_MEM_U32(uSrc1, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2958 IEM_MC_FETCH_GREG_U32(uSrc2, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
2959 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
2960 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, iemAImpl_ ## a_Instr ## _u32, \
2961 iemAImpl_ ## a_Instr ## _u32_fallback), pDst, uSrc1, uSrc2); \
2962 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
2963 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2964 IEM_MC_END(); \
2965 } \
2966 } \
2967 (void)0
2968
2969/**
2970 * @opcode 0xf5
2971 * @oppfx none
2972 * @opflmodify cf,pf,af,zf,sf,of
2973 * @opflclear of
2974 * @opflundef pf,af
2975 * @note VEX only
2976 */
2977FNIEMOP_DEF(iemOp_bzhi_Gy_Ey_By)
2978{
2979 IEMOP_MNEMONIC3(VEX_RMV, BZHI, bzhi, Gy, Ey, By, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
2980 IEMOP_BODY_Gy_Ey_By(bzhi, fBmi2, X86_EFL_AF | X86_EFL_PF);
2981}
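
/*
 * Illustrative reference for BZHI (a sketch only; RefBzhi64 is a hypothetical
 * helper). Bits from the index given in the low byte of the second source
 * upwards are zeroed; an index of 64 or more leaves the value unchanged:
 *
 *     static inline uint64_t RefBzhi64(uint64_t uSrc1, uint64_t uSrc2)
 *     {
 *         unsigned const iFirstZero = uSrc2 & 0xff;
 *         return iFirstZero < 64 ? uSrc1 & (RT_BIT_64(iFirstZero) - 1) : uSrc1;
 *     }
 */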
2982
2983/* Opcode VEX.66.0F38 0xf5 - invalid. */
2984
2985/** Body for PDEP and PEXT (similar to ANDN, except no EFLAGS). */
2986#define IEMOP_BODY_Gy_By_Ey_NoEflags(a_Instr, a_fFeatureMember) \
2987 IEMOP_HLP_IGNORE_VEX_W_PREFIX_IF_NOT_IN_64BIT(); \
2988 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2989 if (IEM_IS_MODRM_REG_MODE(bRm)) \
2990 { \
2991 /* \
2992 * Register, register. \
2993 */ \
2994 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
2995 { \
2996 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
2997 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
2998 IEM_MC_ARG(uint64_t *, pDst, 0); \
2999 IEM_MC_ARG(uint64_t, uSrc1, 1); \
3000 IEM_MC_ARG(uint64_t, uSrc2, 2); \
3001 IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
3002 IEM_MC_FETCH_GREG_U64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
3003 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
3004 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, \
3005 iemAImpl_ ## a_Instr ## _u64, \
3006 iemAImpl_ ## a_Instr ## _u64_fallback), pDst, uSrc1, uSrc2); \
3007 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3008 IEM_MC_END(); \
3009 } \
3010 else \
3011 { \
3012 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
3013 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
3014 IEM_MC_ARG(uint32_t *, pDst, 0); \
3015 IEM_MC_ARG(uint32_t, uSrc1, 1); \
3016 IEM_MC_ARG(uint32_t, uSrc2, 2); \
3017 IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
3018 IEM_MC_FETCH_GREG_U32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
3019 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
3020 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, \
3021 iemAImpl_ ## a_Instr ## _u32, \
3022 iemAImpl_ ## a_Instr ## _u32_fallback), pDst, uSrc1, uSrc2); \
3023 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
3024 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3025 IEM_MC_END(); \
3026 } \
3027 } \
3028 else \
3029 { \
3030 /* \
3031 * Register, memory. \
3032 */ \
3033 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
3034 { \
3035 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
3036 IEM_MC_ARG(uint64_t *, pDst, 0); \
3037 IEM_MC_ARG(uint64_t, uSrc1, 1); \
3038 IEM_MC_ARG(uint64_t, uSrc2, 2); \
3039 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
3040 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
3041 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
3042 IEM_MC_FETCH_MEM_U64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
3043 IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
3044 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
3045 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, \
3046 iemAImpl_ ## a_Instr ## _u64, \
3047 iemAImpl_ ## a_Instr ## _u64_fallback), pDst, uSrc1, uSrc2); \
3048 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3049 IEM_MC_END(); \
3050 } \
3051 else \
3052 { \
3053 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
3054 IEM_MC_ARG(uint32_t *, pDst, 0); \
3055 IEM_MC_ARG(uint32_t, uSrc1, 1); \
3056 IEM_MC_ARG(uint32_t, uSrc2, 2); \
3057 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
3058 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
3059 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeatureMember); \
3060 IEM_MC_FETCH_MEM_U32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
3061 IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
3062 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
3063 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, \
3064 iemAImpl_ ## a_Instr ## _u32, \
3065 iemAImpl_ ## a_Instr ## _u32_fallback), pDst, uSrc1, uSrc2); \
3066 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm)); \
3067 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3068 IEM_MC_END(); \
3069 } \
3070 } \
3071 (void)0
3072
3073
3074/** Opcode VEX.F3.0F38 0xf5 (vex only). */
3075FNIEMOP_DEF(iemOp_pext_Gy_By_Ey)
3076{
3077 IEMOP_MNEMONIC3(VEX_RVM, PEXT, pext, Gy, By, Ey, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
3078 IEMOP_BODY_Gy_By_Ey_NoEflags(pext, fBmi2);
3079}
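
/*
 * Illustrative reference for PEXT (a sketch only; RefPext64 is a hypothetical
 * helper). Source bits selected by set mask bits are gathered into the
 * contiguous low bits of the destination:
 *
 *     static inline uint64_t RefPext64(uint64_t uSrc, uint64_t fMask)
 *     {
 *         uint64_t uResult = 0;
 *         for (uint64_t uDstBit = 1; fMask != 0; fMask &= fMask - 1, uDstBit <<= 1)
 *             if (uSrc & fMask & (0 - fMask))  // test src at the lowest set mask bit
 *                 uResult |= uDstBit;
 *         return uResult;
 *     }
 */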
3080
3081
3082/** Opcode VEX.F2.0F38 0xf5 (vex only). */
3083FNIEMOP_DEF(iemOp_pdep_Gy_By_Ey)
3084{
3085 IEMOP_MNEMONIC3(VEX_RVM, PDEP, pdep, Gy, By, Ey, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
3086 IEMOP_BODY_Gy_By_Ey_NoEflags(pdep, fBmi2);
3087}
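
/*
 * Illustrative reference for PDEP (a sketch only; RefPdep64 is a hypothetical
 * helper). The contiguous low bits of the source are scattered to the
 * positions of the set mask bits:
 *
 *     static inline uint64_t RefPdep64(uint64_t uSrc, uint64_t fMask)
 *     {
 *         uint64_t uResult = 0;
 *         for (uint64_t uSrcBit = 1; fMask != 0; fMask &= fMask - 1, uSrcBit <<= 1)
 *             if (uSrc & uSrcBit)
 *                 uResult |= fMask & (0 - fMask); // deposit at the lowest set mask bit
 *         return uResult;
 *     }
 */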
3088
3089
3090/* Opcode VEX.0F38 0xf6 - invalid. */
/* Opcode VEX.66.0F38 0xf6 - invalid (legacy only). */
/* Opcode VEX.F3.0F38 0xf6 - invalid (legacy only). */


/**
 * @opcode 0xf6
 * @oppfx 0xf2
 * @opflclass unchanged
 */
FNIEMOP_DEF(iemOp_mulx_By_Gy_rDX_Ey)
{
    IEMOP_MNEMONIC4(VEX_RVM, MULX, mulx, Gy, By, Ey, rDX, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
    IEMOP_HLP_IGNORE_VEX_W_PREFIX_IF_NOT_IN_64BIT();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        {
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi2);
            IEM_MC_ARG(uint64_t *, pDst1, 0);
            IEM_MC_ARG(uint64_t *, pDst2, 1);
            IEM_MC_ARG(uint64_t, uSrc1, 2);
            IEM_MC_ARG(uint64_t, uSrc2, 3);
            IEM_MC_FETCH_GREG_U64(uSrc1, X86_GREG_xDX);
            IEM_MC_FETCH_GREG_U64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_REF_GREG_U64(pDst1, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_GREG_U64(pDst2, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi2, iemAImpl_mulx_u64, iemAImpl_mulx_u64_fallback),
                                     pDst1, pDst2, uSrc1, uSrc2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi2);
            IEM_MC_ARG(uint32_t *, pDst1, 0);
            IEM_MC_ARG(uint32_t *, pDst2, 1);
            IEM_MC_ARG(uint32_t, uSrc1, 2);
            IEM_MC_ARG(uint32_t, uSrc2, 3);
            IEM_MC_FETCH_GREG_U32(uSrc1, X86_GREG_xDX);
            IEM_MC_FETCH_GREG_U32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_REF_GREG_U32(pDst1, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_GREG_U32(pDst2, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi2, iemAImpl_mulx_u32, iemAImpl_mulx_u32_fallback),
                                     pDst1, pDst2, uSrc1, uSrc2);
            IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        {
            IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
            IEM_MC_ARG(uint64_t *, pDst1, 0);
            IEM_MC_ARG(uint64_t *, pDst2, 1);
            IEM_MC_ARG(uint64_t, uSrc1, 2);
            IEM_MC_ARG(uint64_t, uSrc2, 3);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi2);
            IEM_MC_FETCH_MEM_U64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_GREG_U64(uSrc1, X86_GREG_xDX);
            IEM_MC_REF_GREG_U64(pDst2, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_REF_GREG_U64(pDst1, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi2, iemAImpl_mulx_u64, iemAImpl_mulx_u64_fallback),
                                     pDst1, pDst2, uSrc1, uSrc2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_ARG(uint32_t *, pDst1, 0);
            IEM_MC_ARG(uint32_t *, pDst2, 1);
            IEM_MC_ARG(uint32_t, uSrc1, 2);
            IEM_MC_ARG(uint32_t, uSrc2, 3);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi2);
            IEM_MC_FETCH_MEM_U32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_GREG_U32(uSrc1, X86_GREG_xDX);
            IEM_MC_REF_GREG_U32(pDst2, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_REF_GREG_U32(pDst1, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fBmi2, iemAImpl_mulx_u32, iemAImpl_mulx_u32_fallback),
                                     pDst1, pDst2, uSrc1, uSrc2);
            IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}


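/*
 * A minimal plain-C sketch of the MULX data path implemented above, for
 * orientation only: unsigned rDX * r/m, with the high half of the full
 * product going to the ModRM.reg destination (pDst1) and the low half to
 * the VEX.vvvv destination (pDst2), EFLAGS untouched.  The name is made up
 * and the code assumes a compiler with unsigned __int128 (GCC/Clang); it is
 * not the shape of the real iemAImpl_mulx_u64 helpers.
 */
DECLINLINE(void) mulxU64Sketch(uint64_t *puDstHi, uint64_t *puDstLo, uint64_t uSrc1, uint64_t uSrc2)
{
    unsigned __int128 const uProduct = (unsigned __int128)uSrc1 * uSrc2;
    *puDstLo = (uint64_t)uProduct;          /* VEX.vvvv operand, written first... */
    *puDstHi = (uint64_t)(uProduct >> 64);  /* ...so the high half wins if both destinations alias. */
}

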
/**
 * @opcode 0xf7
 * @oppfx none
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflclear cf,of
 * @opflundef pf,af,sf
 */
FNIEMOP_DEF(iemOp_bextr_Gy_Ey_By)
{
    IEMOP_MNEMONIC3(VEX_RMV, BEXTR, bextr, Gy, Ey, By, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
    IEMOP_BODY_Gy_Ey_By(bextr, fBmi1, X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
}


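/*
 * A minimal plain-C sketch of the 64-bit BEXTR data path (made-up name,
 * flag plumbing omitted): bits 7:0 of the control operand select the start
 * bit, bits 15:8 the field length, and the extracted field is zero-extended.
 * Of the flags documented above, ZF follows the result while CF/OF are
 * cleared and PF/AF/SF are left undefined.
 */
DECLINLINE(uint64_t) bextrU64Sketch(uint64_t uSrc, uint64_t uCtrl)
{
    unsigned const iStart = (unsigned)(uCtrl & 0xff);
    unsigned const cBits  = (unsigned)((uCtrl >> 8) & 0xff);
    if (iStart >= 64 || cBits == 0)
        return 0;                           /* Field is empty or starts beyond the operand. */
    uint64_t uField = uSrc >> iStart;
    if (cBits < 64)
        uField &= RT_BIT_64(cBits) - 1;     /* Mask down to the requested field length. */
    return uField;                          /* ZF is set when this is zero. */
}

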
/**
 * @opcode 0xf7
 * @oppfx 0x66
 * @opflclass unchanged
 */
FNIEMOP_DEF(iemOp_shlx_Gy_Ey_By)
{
    IEMOP_MNEMONIC3(VEX_RMV, SHLX, shlx, Gy, Ey, By, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
    IEMOP_BODY_Gy_Ey_By_NoEflags(shlx, fBmi2);
}


/**
 * @opcode 0xf7
 * @oppfx 0xf3
 * @opflclass unchanged
 */
FNIEMOP_DEF(iemOp_sarx_Gy_Ey_By)
{
    IEMOP_MNEMONIC3(VEX_RMV, SARX, sarx, Gy, Ey, By, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
    IEMOP_BODY_Gy_Ey_By_NoEflags(sarx, fBmi2);
}


/**
 * @opcode 0xf7
 * @oppfx 0xf2
 * @opflclass unchanged
 */
FNIEMOP_DEF(iemOp_shrx_Gy_Ey_By)
{
    IEMOP_MNEMONIC3(VEX_RMV, SHRX, shrx, Gy, Ey, By, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
    IEMOP_BODY_Gy_Ey_By_NoEflags(shrx, fBmi2);
}

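/*
 * The three shifts above share one shape: the count comes from the second
 * source operand (VEX.vvvv) and is masked to the operand width (63 for
 * 64-bit, 31 for 32-bit operands), and unlike SHL/SHR/SAR no EFLAGS are
 * touched.  A hedged 64-bit sketch with made-up names; the SARX line relies
 * on signed right shifts being arithmetic, which mainstream compilers
 * guarantee:
 */
DECLINLINE(uint64_t) shlxU64Sketch(uint64_t uSrc, uint64_t uCount) { return uSrc << (uCount & 63); }
DECLINLINE(uint64_t) shrxU64Sketch(uint64_t uSrc, uint64_t uCount) { return uSrc >> (uCount & 63); }
DECLINLINE(uint64_t) sarxU64Sketch(uint64_t uSrc, uint64_t uCount) { return (uint64_t)((int64_t)uSrc >> (uCount & 63)); }
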
/* Opcode VEX.0F38 0xf8 - invalid. */
/* Opcode VEX.66.0F38 0xf8 - invalid. */
/* Opcode VEX.F3.0F38 0xf8 - invalid. */
/* Opcode VEX.F2.0F38 0xf8 - invalid. */

/* Opcode VEX.0F38 0xf9 - invalid. */
/* Opcode VEX.66.0F38 0xf9 - invalid. */
/* Opcode VEX.F3.0F38 0xf9 - invalid. */
/* Opcode VEX.F2.0F38 0xf9 - invalid. */

/* Opcode VEX.0F38 0xfa - invalid. */
/* Opcode VEX.66.0F38 0xfa - invalid. */
/* Opcode VEX.F3.0F38 0xfa - invalid. */
/* Opcode VEX.F2.0F38 0xfa - invalid. */

/* Opcode VEX.0F38 0xfb - invalid. */
/* Opcode VEX.66.0F38 0xfb - invalid. */
/* Opcode VEX.F3.0F38 0xfb - invalid. */
/* Opcode VEX.F2.0F38 0xfb - invalid. */

/* Opcode VEX.0F38 0xfc - invalid. */
/* Opcode VEX.66.0F38 0xfc - invalid. */
/* Opcode VEX.F3.0F38 0xfc - invalid. */
/* Opcode VEX.F2.0F38 0xfc - invalid. */

/* Opcode VEX.0F38 0xfd - invalid. */
/* Opcode VEX.66.0F38 0xfd - invalid. */
/* Opcode VEX.F3.0F38 0xfd - invalid. */
/* Opcode VEX.F2.0F38 0xfd - invalid. */

/* Opcode VEX.0F38 0xfe - invalid. */
/* Opcode VEX.66.0F38 0xfe - invalid. */
/* Opcode VEX.F3.0F38 0xfe - invalid. */
/* Opcode VEX.F2.0F38 0xfe - invalid. */

/* Opcode VEX.0F38 0xff - invalid. */
/* Opcode VEX.66.0F38 0xff - invalid. */
/* Opcode VEX.F3.0F38 0xff - invalid. */
/* Opcode VEX.F2.0F38 0xff - invalid. */


/**
 * VEX opcode map \#2.
 *
 * @sa g_apfnThreeByte0f38
 */
const PFNIEMOP g_apfnVexMap2[] =
{
    /* no prefix, 066h prefix, f3h prefix, f2h prefix */
    /* 0x00 */ iemOp_InvalidNeedRM, iemOp_vpshufb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x01 */ iemOp_InvalidNeedRM, iemOp_vphaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x02 */ iemOp_InvalidNeedRM, iemOp_vphaddd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x03 */ iemOp_InvalidNeedRM, iemOp_vphaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x04 */ iemOp_InvalidNeedRM, iemOp_vpmaddubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x05 */ iemOp_InvalidNeedRM, iemOp_vphsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x06 */ iemOp_InvalidNeedRM, iemOp_vphsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x07 */ iemOp_InvalidNeedRM, iemOp_vphsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x08 */ iemOp_InvalidNeedRM, iemOp_vpsignb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x09 */ iemOp_InvalidNeedRM, iemOp_vpsignw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x0a */ iemOp_InvalidNeedRM, iemOp_vpsignd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x0b */ iemOp_InvalidNeedRM, iemOp_vpmulhrsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x0c */ iemOp_InvalidNeedRM, iemOp_vpermilps_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x0d */ iemOp_InvalidNeedRM, iemOp_vpermilpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x0e */ iemOp_InvalidNeedRM, iemOp_vtestps_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x0f */ iemOp_InvalidNeedRM, iemOp_vtestpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x10 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x11 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x12 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x13 */ iemOp_InvalidNeedRM, iemOp_vcvtph2ps_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x14 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x15 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x16 */ iemOp_InvalidNeedRM, iemOp_vpermps_Vqq_Hqq_Wqq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x17 */ iemOp_InvalidNeedRM, iemOp_vptest_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x18 */ iemOp_InvalidNeedRM, iemOp_vbroadcastss_Vx_Wd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x19 */ iemOp_InvalidNeedRM, iemOp_vbroadcastsd_Vqq_Wq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x1a */ iemOp_InvalidNeedRM, iemOp_vbroadcastf128_Vqq_Mdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1c */ iemOp_InvalidNeedRM, iemOp_vpabsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x1d */ iemOp_InvalidNeedRM, iemOp_vpabsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x1e */ iemOp_InvalidNeedRM, iemOp_vpabsd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x20 */ iemOp_InvalidNeedRM, iemOp_vpmovsxbw_Vx_UxMq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x21 */ iemOp_InvalidNeedRM, iemOp_vpmovsxbd_Vx_UxMd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x22 */ iemOp_InvalidNeedRM, iemOp_vpmovsxbq_Vx_UxMw, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x23 */ iemOp_InvalidNeedRM, iemOp_vpmovsxwd_Vx_UxMq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x24 */ iemOp_InvalidNeedRM, iemOp_vpmovsxwq_Vx_UxMd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x25 */ iemOp_InvalidNeedRM, iemOp_vpmovsxdq_Vx_UxMq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x28 */ iemOp_InvalidNeedRM, iemOp_vpmuldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x29 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2a */ iemOp_InvalidNeedRM, iemOp_vmovntdqa_Vx_Mx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2b */ iemOp_InvalidNeedRM, iemOp_vpackusdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2c */ iemOp_InvalidNeedRM, iemOp_vmaskmovps_Vx_Hx_Mx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2d */ iemOp_InvalidNeedRM, iemOp_vmaskmovpd_Vx_Hx_Mx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2e */ iemOp_InvalidNeedRM, iemOp_vmaskmovps_Mx_Hx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x2f */ iemOp_InvalidNeedRM, iemOp_vmaskmovpd_Mx_Hx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x30 */ iemOp_InvalidNeedRM, iemOp_vpmovzxbw_Vx_UxMq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x31 */ iemOp_InvalidNeedRM, iemOp_vpmovzxbd_Vx_UxMd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x32 */ iemOp_InvalidNeedRM, iemOp_vpmovzxbq_Vx_UxMw, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x33 */ iemOp_InvalidNeedRM, iemOp_vpmovzxwd_Vx_UxMq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x34 */ iemOp_InvalidNeedRM, iemOp_vpmovzxwq_Vx_UxMd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x35 */ iemOp_InvalidNeedRM, iemOp_vpmovzxdq_Vx_UxMq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x36 */ iemOp_InvalidNeedRM, iemOp_vpermd_Vqq_Hqq_Wqq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x37 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x38 */ iemOp_InvalidNeedRM, iemOp_vpminsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x39 */ iemOp_InvalidNeedRM, iemOp_vpminsd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x3a */ iemOp_InvalidNeedRM, iemOp_vpminuw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x3b */ iemOp_InvalidNeedRM, iemOp_vpminud_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x3c */ iemOp_InvalidNeedRM, iemOp_vpmaxsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x3d */ iemOp_InvalidNeedRM, iemOp_vpmaxsd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x3e */ iemOp_InvalidNeedRM, iemOp_vpmaxuw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x3f */ iemOp_InvalidNeedRM, iemOp_vpmaxud_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0x40 */ iemOp_InvalidNeedRM, iemOp_vpmulld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x41 */ iemOp_InvalidNeedRM, iemOp_vphminposuw_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x45 */ iemOp_InvalidNeedRM, iemOp_vpsrlvd_q_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x46 */ iemOp_InvalidNeedRM, iemOp_vpsravd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x47 */ iemOp_InvalidNeedRM, iemOp_vpsllvd_q_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x50 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x51 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x52 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x53 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x54 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x55 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x56 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x57 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x58 */ iemOp_InvalidNeedRM, iemOp_vpbroadcastd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x59 */ iemOp_InvalidNeedRM, iemOp_vpbroadcastq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x5a */ iemOp_InvalidNeedRM, iemOp_vbroadcasti128_Vqq_Mdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x5b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x5c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x5d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x5e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x5f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x60 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x61 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x62 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x63 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x64 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x65 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x66 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x67 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x68 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x69 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x6a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x6b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x6c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x6d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x6e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x6f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x70 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x71 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x72 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x73 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x74 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x75 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x76 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x77 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x78 */ iemOp_InvalidNeedRM, iemOp_vpbroadcastb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x79 */ iemOp_InvalidNeedRM, iemOp_vpbroadcastw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7c */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7e */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8c */ iemOp_InvalidNeedRM, iemOp_vpmaskmovd_q_Vx_Hx_Mx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8e */ iemOp_InvalidNeedRM, iemOp_vpmaskmovd_q_Mx_Vx_Hx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x90 */ iemOp_InvalidNeedRM, iemOp_vpgatherdd_q_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x91 */ iemOp_InvalidNeedRM, iemOp_vpgatherqd_q_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x92 */ iemOp_InvalidNeedRM, iemOp_vgatherdps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x93 */ iemOp_InvalidNeedRM, iemOp_vgatherqps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x96 */ iemOp_InvalidNeedRM, iemOp_vfmaddsub132ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x97 */ iemOp_InvalidNeedRM, iemOp_vfmsubadd132ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x98 */ iemOp_InvalidNeedRM, iemOp_vfmadd132ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x99 */ iemOp_InvalidNeedRM, iemOp_vfmadd132ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x9a */ iemOp_InvalidNeedRM, iemOp_vfmsub132ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x9b */ iemOp_InvalidNeedRM, iemOp_vfmsub132ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x9c */ iemOp_InvalidNeedRM, iemOp_vfnmadd132ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x9d */ iemOp_InvalidNeedRM, iemOp_vfnmadd132ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x9e */ iemOp_InvalidNeedRM, iemOp_vfnmsub132ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0x9f */ iemOp_InvalidNeedRM, iemOp_vfnmsub132ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa6 */ iemOp_InvalidNeedRM, iemOp_vfmaddsub213ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xa7 */ iemOp_InvalidNeedRM, iemOp_vfmsubadd213ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xa8 */ iemOp_InvalidNeedRM, iemOp_vfmadd213ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xa9 */ iemOp_InvalidNeedRM, iemOp_vfmadd213ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xaa */ iemOp_InvalidNeedRM, iemOp_vfmsub213ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xab */ iemOp_InvalidNeedRM, iemOp_vfmsub213ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xac */ iemOp_InvalidNeedRM, iemOp_vfnmadd213ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xad */ iemOp_InvalidNeedRM, iemOp_vfnmadd213ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xae */ iemOp_InvalidNeedRM, iemOp_vfnmsub213ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xaf */ iemOp_InvalidNeedRM, iemOp_vfnmsub213ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb6 */ iemOp_InvalidNeedRM, iemOp_vfmaddsub231ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xb7 */ iemOp_InvalidNeedRM, iemOp_vfmsubadd231ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xb8 */ iemOp_InvalidNeedRM, iemOp_vfmadd231ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xb9 */ iemOp_InvalidNeedRM, iemOp_vfmadd231ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xba */ iemOp_InvalidNeedRM, iemOp_vfmsub231ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xbb */ iemOp_InvalidNeedRM, iemOp_vfmsub231ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xbc */ iemOp_InvalidNeedRM, iemOp_vfnmadd231ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xbd */ iemOp_InvalidNeedRM, iemOp_vfnmadd231ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xbe */ iemOp_InvalidNeedRM, iemOp_vfnmsub231ps_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xbf */ iemOp_InvalidNeedRM, iemOp_vfnmsub231ss_d_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc2 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc5 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xd0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd2 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd5 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xda */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xdb */ iemOp_InvalidNeedRM, iemOp_vaesimc_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdc */ iemOp_InvalidNeedRM, iemOp_vaesenc_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdd */ iemOp_InvalidNeedRM, iemOp_vaesenclast_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xde */ iemOp_InvalidNeedRM, iemOp_vaesdec_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xdf */ iemOp_InvalidNeedRM, iemOp_vaesdeclast_Vdq_Wdq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,

    /* 0xe0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe2 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe3 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe5 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe6 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe7 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xea */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xeb */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xec */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xed */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xee */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xef */ IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xf0 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xf1 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xf2 */ iemOp_andn_Gy_By_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf3 */ iemOp_VGrp17_f3, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
    /* 0xf4 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xf5 */ iemOp_bzhi_Gy_Ey_By, iemOp_InvalidNeedRM, iemOp_pext_Gy_By_Ey, iemOp_pdep_Gy_By_Ey,
    /* 0xf6 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_mulx_By_Gy_rDX_Ey,
    /* 0xf7 */ iemOp_bextr_Gy_Ey_By, iemOp_shlx_Gy_Ey_By, iemOp_sarx_Gy_Ey_By, iemOp_shrx_Gy_Ey_By,
    /* 0xf8 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xf9 */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xfa */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xfb */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xfc */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xfd */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xfe */ IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xff */ IEMOP_X4(iemOp_InvalidNeedRM),
};
AssertCompile(RT_ELEMENTS(g_apfnVexMap2) == 1024);
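
/*
 * Sketch of how a dispatcher can index this table (illustrative, not the
 * actual decoder code): each opcode byte owns four consecutive slots, one
 * per SIMD prefix, which is exactly what the AssertCompile above pins down
 * (256 opcodes x 4 prefixes = 1024 entries).  The function and parameter
 * names here are made up.
 */
DECLINLINE(PFNIEMOP) iemVexMap2LookupSketch(uint8_t bOpcode, unsigned idxPrefix /* 0=none, 1=066h, 2=0f3h, 3=0f2h */)
{
    return g_apfnVexMap2[(size_t)bOpcode * 4 + idxPrefix];
}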

/** @} */