VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsVexMap3.cpp.h@ 99324

Last change on this file since 99324 was 99324, checked in by vboxsync, 19 months ago

VMM/IEM: Use IEMOP_HLP_DONE_VEX_DECODING_*() rather than IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT or IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT to check for AVX and AVX2 cpuid bits, since the latter two are for runtime checks while the former for the decoding stage. OTOH, the AVX CPUID check is unnecessary in the VexMap files, since the VEX prefixes already checks for it - but that can be optimized some other time. Fixed a number of AVX2/AVX mixups resulting from copy&paste or laziness. bugref:10369
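
The distinction drawn in that change message, CPUID feature checks in the decoding stage versus runtime-state checks in the IEM_MC stage, shows up as a fixed ordering in every handler below. A minimal sketch of the recurring pattern, using the macros as they appear in this file:

    /* Decode stage: all opcode bytes have been consumed; reject the
       instruction here if the guest CPU profile lacks the required
       feature bit (fAvx, fAvx2, ...). */
    IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
    /* Runtime stage: exceptions that depend on current guest state
       (CR0/CR4/XCR0 and the like) rather than on CPUID. */
    IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
    IEM_MC_PREPARE_AVX_USAGE();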

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 56.5 KB
/* $Id: IEMAllInstructionsVexMap3.cpp.h 99324 2023-04-06 23:34:00Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation, 0x0f 0x3a map.
 *
 * @remarks This file is the VEX mirror of IEMAllInstructionsThree0f3a.cpp.h.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/** @name VEX Opcode Map 3
 * @{
 */

/**
 * Common worker for AVX/AVX2 instructions on the forms:
 *     - vpxxx    xmm0, xmm1, xmm2/mem128, imm8
 *     - vpxxx    ymm0, ymm1, ymm2/mem256, imm8
 *
 * Takes function table for function w/o implicit state parameter.
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Ib_Opt, PCIEMOPMEDIAOPTF3IMM8, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_BEGIN(4, 3);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
            IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, bImmArg);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_BEGIN(4, 0);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
            IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, bImmArg);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(4, 4);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, bImmArg);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(4, 2);
            IEM_MC_LOCAL(RTUINT128U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, bImmArg);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}


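/* Note: "Exceptions type 4" in these worker comments refers to the SDM's VEX
   exception classification; type 4 permits unaligned memory operands, which
   is presumably why the memory paths use the *_NO_AC fetch variants. */
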
/**
 * Common worker for AVX instructions on the forms:
 *     - vblendps/d    xmm0, xmm1, xmm2/mem128, imm8
 *     - vblendps/d    ymm0, ymm1, ymm2/mem256, imm8
 *
 * Takes function table for function w/o implicit state parameter.
 *
 * Exceptions type 4. AVX cpuid check for both 128-bit and 256-bit operation.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Ib_Opt, PCIEMOPMEDIAOPTF3IMM8, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(4, 3);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, bImmArg);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(4, 0);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
            IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, bImmArg);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(4, 4);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, bImmArg);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(4, 2);
            IEM_MC_LOCAL(RTUINT128U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, bImmArg);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}


/** Opcode VEX.66.0F3A 0x00. */
FNIEMOP_STUB(iemOp_vpermq_Vqq_Wqq_Ib);
/** Opcode VEX.66.0F3A 0x01. */
FNIEMOP_STUB(iemOp_vpermpd_Vqq_Wqq_Ib);
/** Opcode VEX.66.0F3A 0x02. */
FNIEMOP_STUB(iemOp_vpblendd_Vx_Wx_Ib);
/* Opcode VEX.66.0F3A 0x03 - invalid */
/** Opcode VEX.66.0F3A 0x04. */
FNIEMOP_STUB(iemOp_vpermilps_Vx_Wx_Ib);
/** Opcode VEX.66.0F3A 0x05. */
FNIEMOP_STUB(iemOp_vpermilpd_Vx_Wx_Ib);
/** Opcode VEX.66.0F3A 0x06 (vex only). */
FNIEMOP_STUB(iemOp_vperm2f128_Vqq_Hqq_Wqq_Ib);
/* Opcode VEX.66.0F3A 0x07 - invalid */
/** Opcode VEX.66.0F3A 0x08. */
FNIEMOP_STUB(iemOp_vroundps_Vx_Wx_Ib);
/** Opcode VEX.66.0F3A 0x09. */
FNIEMOP_STUB(iemOp_vroundpd_Vx_Wx_Ib);
/** Opcode VEX.66.0F3A 0x0a. */
FNIEMOP_STUB(iemOp_vroundss_Vss_Wss_Ib);
/** Opcode VEX.66.0F3A 0x0b. */
FNIEMOP_STUB(iemOp_vroundsd_Vsd_Wsd_Ib);

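/* Note: IEMOPMEDIAOPTF3IMM8_INIT_VARS below presumably instantiates the
   s_Host and s_Fallback implementation tables that IEM_SELECT_HOST_OR_FALLBACK
   chooses between (host-instruction-assisted vs. plain C fallback). */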

/** Opcode VEX.66.0F3A 0x0c.
 * AVX,AVX */
FNIEMOP_DEF(iemOp_vblendps_Vx_Hx_Wx_Ib)
{
    IEMOP_MNEMONIC3(VEX_RVM, VBLENDPS, vblendps, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0); /* @todo */
    IEMOPMEDIAOPTF3IMM8_INIT_VARS(vblendps);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F3A 0x0d.
 * AVX,AVX */
FNIEMOP_DEF(iemOp_vblendpd_Vx_Hx_Wx_Ib)
{
    IEMOP_MNEMONIC3(VEX_RVM, VBLENDPD, vblendpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0); /* @todo */
    IEMOPMEDIAOPTF3IMM8_INIT_VARS(vblendpd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F3A 0x0e.
 * AVX,AVX2 */
FNIEMOP_DEF(iemOp_vpblendw_Vx_Hx_Wx_Ib)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPBLENDW, vpblendw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0); /* @todo */
    IEMOPMEDIAOPTF3IMM8_INIT_VARS(vpblendw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F3A 0x0f - invalid */


/** Opcode VEX.66.0F3A 0x0f.
 * AVX,AVX2 */
FNIEMOP_DEF(iemOp_vpalignr_Vx_Hx_Wx_Ib)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPALIGNR, vpalignr, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0); /* @todo */
    IEMOPMEDIAOPTF3IMM8_INIT_VARS(vpalignr);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.66.0F3A 0x10 - invalid */
/* Opcode VEX.66.0F3A 0x11 - invalid */
/* Opcode VEX.66.0F3A 0x12 - invalid */
/* Opcode VEX.66.0F3A 0x13 - invalid */
/** Opcode VEX.66.0F3A 0x14. */
FNIEMOP_STUB(iemOp_vpextrb_RdMb_Vdq_Ib);
/** Opcode VEX.66.0F3A 0x15. */
FNIEMOP_STUB(iemOp_vpextrw_RdMw_Vdq_Ib);
/** Opcode VEX.66.0F3A 0x16. */
FNIEMOP_STUB(iemOp_vpextrd_q_RdMw_Vdq_Ib);
/** Opcode VEX.66.0F3A 0x17. */
FNIEMOP_STUB(iemOp_vextractps_Ed_Vdq_Ib);
/** Opcode VEX.66.0F3A 0x18 (vex only). */
FNIEMOP_STUB(iemOp_vinsertf128_Vqq_Hqq_Wqq_Ib);
/** Opcode VEX.66.0F3A 0x19 (vex only). */
FNIEMOP_STUB(iemOp_vextractf128_Wdq_Vqq_Ib);
/* Opcode VEX.66.0F3A 0x1a - invalid */
/* Opcode VEX.66.0F3A 0x1b - invalid */
/* Opcode VEX.66.0F3A 0x1c - invalid */
/** Opcode VEX.66.0F3A 0x1d (vex only). */
FNIEMOP_STUB(iemOp_vcvtps2ph_Wx_Vx_Ib);
/* Opcode VEX.66.0F3A 0x1e - invalid */
/* Opcode VEX.66.0F3A 0x1f - invalid */


/** Opcode VEX.66.0F3A 0x20. */
FNIEMOP_STUB(iemOp_vpinsrb_Vdq_Hdq_RyMb_Ib);
/** Opcode VEX.66.0F3A 0x21. */
FNIEMOP_STUB(iemOp_vinsertps_Vdq_Hdq_UdqMd_Ib);
/** Opcode VEX.66.0F3A 0x22. */
FNIEMOP_STUB(iemOp_vpinsrd_q_Vdq_Hdq_Ey_Ib);
/* Opcode VEX.66.0F3A 0x23 - invalid */
/* Opcode VEX.66.0F3A 0x24 - invalid */
/* Opcode VEX.66.0F3A 0x25 - invalid */
/* Opcode VEX.66.0F3A 0x26 - invalid */
/* Opcode VEX.66.0F3A 0x27 - invalid */
/* Opcode VEX.66.0F3A 0x28 - invalid */
/* Opcode VEX.66.0F3A 0x29 - invalid */
/* Opcode VEX.66.0F3A 0x2a - invalid */
/* Opcode VEX.66.0F3A 0x2b - invalid */
/* Opcode VEX.66.0F3A 0x2c - invalid */
/* Opcode VEX.66.0F3A 0x2d - invalid */
/* Opcode VEX.66.0F3A 0x2e - invalid */
/* Opcode VEX.66.0F3A 0x2f - invalid */


/* Opcode VEX.66.0F3A 0x30 - invalid */
/* Opcode VEX.66.0F3A 0x31 - invalid */
/* Opcode VEX.66.0F3A 0x32 - invalid */
/* Opcode VEX.66.0F3A 0x33 - invalid */
/* Opcode VEX.66.0F3A 0x34 - invalid */
/* Opcode VEX.66.0F3A 0x35 - invalid */
/* Opcode VEX.66.0F3A 0x36 - invalid */
/* Opcode VEX.66.0F3A 0x37 - invalid */
/** Opcode VEX.66.0F3A 0x38 (vex only). */
FNIEMOP_STUB(iemOp_vinserti128_Vqq_Hqq_Wqq_Ib);
/** Opcode VEX.66.0F3A 0x39 (vex only). */
FNIEMOP_STUB(iemOp_vextracti128_Wdq_Vqq_Ib);
/* Opcode VEX.66.0F3A 0x3a - invalid */
/* Opcode VEX.66.0F3A 0x3b - invalid */
/* Opcode VEX.66.0F3A 0x3c - invalid */
/* Opcode VEX.66.0F3A 0x3d - invalid */
/* Opcode VEX.66.0F3A 0x3e - invalid */
/* Opcode VEX.66.0F3A 0x3f - invalid */


/** Opcode VEX.66.0F3A 0x40. */
FNIEMOP_STUB(iemOp_vdpps_Vx_Hx_Wx_Ib);
/** Opcode VEX.66.0F3A 0x41. */
FNIEMOP_STUB(iemOp_vdppd_Vdq_Hdq_Wdq_Ib);
/** Opcode VEX.66.0F3A 0x42. */
FNIEMOP_STUB(iemOp_vmpsadbw_Vx_Hx_Wx_Ib);
/* Opcode VEX.66.0F3A 0x43 - invalid */


/** Opcode VEX.66.0F3A 0x44. */
FNIEMOP_DEF(iemOp_vpclmulqdq_Vdq_Hdq_Wdq_Ib)
{
    //IEMOP_MNEMONIC3(VEX_RVM, VPCLMULQDQ, vpclmulqdq, Vdq, Hdq, Wdq, DISOPTYPE_HARMLESS, 0); /* @todo */

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fPclMul);
        IEM_MC_BEGIN(4, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
        IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fPclMul, iemAImpl_vpclmulqdq_u128, iemAImpl_vpclmulqdq_u128_fallback),
                                 puDst, puSrc1, puSrc2, bImmArg);
        IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(4, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fPclMul);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fPclMul, iemAImpl_vpclmulqdq_u128, iemAImpl_vpclmulqdq_u128_fallback),
                                 puDst, puSrc1, puSrc2, bImmArg);
        IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


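/* Note: the _L0_ decoding helper used above enforces VEX.L == 0; the wide
   256-bit VPCLMULQDQ forms belong to a later CPUID extension that this
   handler does not implement. */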
/* Opcode VEX.66.0F3A 0x45 - invalid */
/** Opcode VEX.66.0F3A 0x46 (vex only). */
FNIEMOP_STUB(iemOp_vperm2i128_Vqq_Hqq_Wqq_Ib);
/* Opcode VEX.66.0F3A 0x47 - invalid */
/** Opcode VEX.66.0F3A 0x48 (AMD tables only). */
FNIEMOP_STUB(iemOp_vpermil2ps_Vx_Hx_Wp_Lx);
/** Opcode VEX.66.0F3A 0x49 (AMD tables only). */
FNIEMOP_STUB(iemOp_vpermil2pd_Vx_Hx_Wp_Lx);


/**
 * Common worker for AVX instructions on the forms:
 *     - vblendvps/d    xmm0, xmm1, xmm2/mem128, xmm4
 *     - vblendvps/d    ymm0, ymm1, ymm2/mem256, ymm4
 *
 * Exceptions type 4. AVX cpuid check for both 128-bit and 256-bit operations.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Lx, PCIEMOPBLENDOP, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(4, 4);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_LOCAL(RTUINT256U, uSrc3);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc3, uSrc3, 3);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_FETCH_YREG_U256(uSrc3, bOp4 >> 4); /** @todo Ignore MSB in 32-bit mode. */
            IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, puSrc3);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(4, 0);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
            IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
            IEM_MC_ARG(PCRTUINT128U, puSrc3, 3);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc3, bOp4 >> 4); /** @todo Ignore MSB in 32-bit mode. */
            IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, puSrc3);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(4, 5);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_LOCAL(RTUINT256U, uSrc3);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc3, uSrc3, 3);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);

            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_YREG_U256(uSrc3, bOp4 >> 4); /** @todo Ignore MSB in 32-bit mode. */
            IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, puSrc3);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(4, 2);
            IEM_MC_LOCAL(RTUINT128U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
            IEM_MC_ARG(PCRTUINT128U, puSrc3, 3);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);

            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_REF_XREG_U128_CONST(puSrc3, bOp4 >> 4); /** @todo Ignore MSB in 32-bit mode. */
            IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, puSrc3);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}


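/* Note: the blend-variable workers above and below use the VEX /is4 operand
   encoding, where the fourth register operand travels in the high nibble of
   the trailing immediate byte; that is what "bOp4 >> 4" extracts. */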
/** Opcode VEX.66.0F3A 0x4a (vex only).
 * AVX, AVX */
FNIEMOP_DEF(iemOp_vblendvps_Vx_Hx_Wx_Lx)
{
    //IEMOP_MNEMONIC4(VEX_RVM, VBLENDVPS, vblendvps, Vx, Hx, Wx, Lx, DISOPTYPE_HARMLESS, 0); @todo
    IEMOPBLENDOP_INIT_VARS(vblendvps);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Lx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F3A 0x4b (vex only).
 * AVX, AVX */
FNIEMOP_DEF(iemOp_vblendvpd_Vx_Hx_Wx_Lx)
{
    //IEMOP_MNEMONIC4(VEX_RVM, VBLENDVPD, vblendvpd, Vx, Hx, Wx, Lx, DISOPTYPE_HARMLESS, 0); @todo
    IEMOPBLENDOP_INIT_VARS(vblendvpd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Lx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
}


/**
 * Common worker for AVX/AVX2 instructions on the forms:
 *     - vpxxx    xmm0, xmm1, xmm2/mem128, xmm4
 *     - vpxxx    ymm0, ymm1, ymm2/mem256, ymm4
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Lx, PCIEMOPBLENDOP, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);

            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_BEGIN(4, 4);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_LOCAL(RTUINT256U, uSrc3);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc3, uSrc3, 3);
            IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_FETCH_YREG_U256(uSrc3, bOp4 >> 4); /** @todo Ignore MSB in 32-bit mode. */
            IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, puSrc3);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);

            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_BEGIN(4, 0);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
            IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
            IEM_MC_ARG(PCRTUINT128U, puSrc3, 3);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); /* 128-bit form only needs AVX, matching the cpuid check above. */
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc3, bOp4 >> 4); /** @todo Ignore MSB in 32-bit mode. */
            IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, puSrc3);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(4, 5);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_LOCAL(RTUINT256U, uSrc3);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc3, uSrc3, 3);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);

            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_YREG_U256(uSrc3, bOp4 >> 4); /** @todo Ignore MSB in 32-bit mode. */
            IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, puSrc3);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(4, 2);
            IEM_MC_LOCAL(RTUINT128U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
            IEM_MC_ARG(PCRTUINT128U, puSrc3, 3);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);

            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); /* 128-bit form only needs AVX, matching the cpuid check above. */
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_REF_XREG_U128_CONST(puSrc3, bOp4 >> 4); /** @todo Ignore MSB in 32-bit mode. */
            IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, puSrc3);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}


/** Opcode VEX.66.0F3A 0x4c (vex only).
 * AVX, AVX2 */
FNIEMOP_DEF(iemOp_vpblendvb_Vx_Hx_Wx_Lx)
{
    //IEMOP_MNEMONIC4(VEX_RVM, VPBLENDVB, vpblendvb, Vx, Hx, Wx, Lx, DISOPTYPE_HARMLESS, 0); @todo
    IEMOPBLENDOP_INIT_VARS(vpblendvb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Lx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.66.0F3A 0x4d - invalid */
/* Opcode VEX.66.0F3A 0x4e - invalid */
/* Opcode VEX.66.0F3A 0x4f - invalid */


/* Opcode VEX.66.0F3A 0x50 - invalid */
/* Opcode VEX.66.0F3A 0x51 - invalid */
/* Opcode VEX.66.0F3A 0x52 - invalid */
/* Opcode VEX.66.0F3A 0x53 - invalid */
/* Opcode VEX.66.0F3A 0x54 - invalid */
/* Opcode VEX.66.0F3A 0x55 - invalid */
/* Opcode VEX.66.0F3A 0x56 - invalid */
/* Opcode VEX.66.0F3A 0x57 - invalid */
/* Opcode VEX.66.0F3A 0x58 - invalid */
/* Opcode VEX.66.0F3A 0x59 - invalid */
/* Opcode VEX.66.0F3A 0x5a - invalid */
/* Opcode VEX.66.0F3A 0x5b - invalid */
/** Opcode VEX.66.0F3A 0x5c (AMD tables only). */
FNIEMOP_STUB(iemOp_vfmaddsubps_Vx_Lx_Wx_Hx);
/** Opcode VEX.66.0F3A 0x5d (AMD tables only). */
FNIEMOP_STUB(iemOp_vfmaddsubpd_Vx_Lx_Wx_Hx);
/** Opcode VEX.66.0F3A 0x5e (AMD tables only). */
FNIEMOP_STUB(iemOp_vfmsubaddps_Vx_Lx_Wx_Hx);
/** Opcode VEX.66.0F3A 0x5f (AMD tables only). */
FNIEMOP_STUB(iemOp_vfmsubaddpd_Vx_Lx_Wx_Hx);


/** Opcode VEX.66.0F3A 0x60. */
FNIEMOP_STUB(iemOp_vpcmpestrm_Vdq_Wdq_Ib);
/** Opcode VEX.66.0F3A 0x61. */
FNIEMOP_STUB(iemOp_vpcmpestri_Vdq_Wdq_Ib);
/** Opcode VEX.66.0F3A 0x62. */
FNIEMOP_STUB(iemOp_vpcmpistrm_Vdq_Wdq_Ib);
/** Opcode VEX.66.0F3A 0x63. */
FNIEMOP_STUB(iemOp_vpcmpistri_Vdq_Wdq_Ib);
/* Opcode VEX.66.0F3A 0x64 - invalid */
/* Opcode VEX.66.0F3A 0x65 - invalid */
/* Opcode VEX.66.0F3A 0x66 - invalid */
/* Opcode VEX.66.0F3A 0x67 - invalid */
/** Opcode VEX.66.0F3A 0x68 (AMD tables only). */
FNIEMOP_STUB(iemOp_vfmaddps_Vx_Lx_Wx_Hx);
/** Opcode VEX.66.0F3A 0x69 (AMD tables only). */
FNIEMOP_STUB(iemOp_vfmaddpd_Vx_Lx_Wx_Hx);
/** Opcode VEX.66.0F3A 0x6a (AMD tables only). */
FNIEMOP_STUB(iemOp_vfmaddss_Vx_Lx_Wx_Hx);
/** Opcode VEX.66.0F3A 0x6b (AMD tables only). */
FNIEMOP_STUB(iemOp_vfmaddsd_Vx_Lx_Wx_Hx);
/** Opcode VEX.66.0F3A 0x6c (AMD tables only). */
FNIEMOP_STUB(iemOp_vfmsubps_Vx_Lx_Wx_Hx);
/** Opcode VEX.66.0F3A 0x6d (AMD tables only). */
FNIEMOP_STUB(iemOp_vfmsubpd_Vx_Lx_Wx_Hx);
/** Opcode VEX.66.0F3A 0x6e (AMD tables only). */
FNIEMOP_STUB(iemOp_vfmsubss_Vx_Lx_Wx_Hx);
/** Opcode VEX.66.0F3A 0x6f (AMD tables only). */
FNIEMOP_STUB(iemOp_vfmsubsd_Vx_Lx_Wx_Hx);

/* Opcode VEX.66.0F3A 0x70 - invalid */
/* Opcode VEX.66.0F3A 0x71 - invalid */
/* Opcode VEX.66.0F3A 0x72 - invalid */
/* Opcode VEX.66.0F3A 0x73 - invalid */
/* Opcode VEX.66.0F3A 0x74 - invalid */
/* Opcode VEX.66.0F3A 0x75 - invalid */
/* Opcode VEX.66.0F3A 0x76 - invalid */
/* Opcode VEX.66.0F3A 0x77 - invalid */
/** Opcode VEX.66.0F3A 0x78 (AMD tables only). */
FNIEMOP_STUB(iemOp_vfnmaddps_Vx_Lx_Wx_Hx);
/** Opcode VEX.66.0F3A 0x79 (AMD tables only). */
FNIEMOP_STUB(iemOp_vfnmaddpd_Vx_Lx_Wx_Hx);
/** Opcode VEX.66.0F3A 0x7a (AMD tables only). */
FNIEMOP_STUB(iemOp_vfnmaddss_Vx_Lx_Wx_Hx);
/** Opcode VEX.66.0F3A 0x7b (AMD tables only). */
FNIEMOP_STUB(iemOp_vfnmaddsd_Vx_Lx_Wx_Hx);
/** Opcode VEX.66.0F3A 0x7c (AMD tables only). */
FNIEMOP_STUB(iemOp_vfnmsubps_Vx_Lx_Wx_Hx);
/** Opcode VEX.66.0F3A 0x7d (AMD tables only). */
FNIEMOP_STUB(iemOp_vfnmsubpd_Vx_Lx_Wx_Hx);
/** Opcode VEX.66.0F3A 0x7e (AMD tables only). */
FNIEMOP_STUB(iemOp_vfnmsubss_Vx_Lx_Wx_Hx);
/** Opcode VEX.66.0F3A 0x7f (AMD tables only). */
FNIEMOP_STUB(iemOp_vfnmsubsd_Vx_Lx_Wx_Hx);

/* Opcodes VEX.0F3A 0x80 thru 0xbf are unused. */


/* Opcode VEX.66.0F3A 0xc0 - invalid */
/* Opcode VEX.66.0F3A 0xc1 - invalid */
/* Opcode VEX.66.0F3A 0xc2 - invalid */
/* Opcode VEX.66.0F3A 0xc3 - invalid */
/* Opcode VEX.66.0F3A 0xc4 - invalid */
/* Opcode VEX.66.0F3A 0xc5 - invalid */
/* Opcode VEX.66.0F3A 0xc6 - invalid */
/* Opcode VEX.66.0F3A 0xc7 - invalid */
/* Opcode VEX.66.0F3A 0xc8 - invalid */
/* Opcode VEX.66.0F3A 0xc9 - invalid */
/* Opcode VEX.66.0F3A 0xca - invalid */
/* Opcode VEX.66.0F3A 0xcb - invalid */
/** Opcode VEX.0F3A 0xcc. */
FNIEMOP_STUB(iemOp_vsha1rnds4_Vdq_Wdq_Ib);
/* Opcode VEX.66.0F3A 0xcd - invalid */
/* Opcode VEX.66.0F3A 0xce - invalid */
/* Opcode VEX.66.0F3A 0xcf - invalid */


/* Opcode VEX.66.0F3A 0xd0 - invalid */
/* Opcode VEX.66.0F3A 0xd1 - invalid */
/* Opcode VEX.66.0F3A 0xd2 - invalid */
/* Opcode VEX.66.0F3A 0xd3 - invalid */
/* Opcode VEX.66.0F3A 0xd4 - invalid */
/* Opcode VEX.66.0F3A 0xd5 - invalid */
/* Opcode VEX.66.0F3A 0xd6 - invalid */
/* Opcode VEX.66.0F3A 0xd7 - invalid */
/* Opcode VEX.66.0F3A 0xd8 - invalid */
/* Opcode VEX.66.0F3A 0xd9 - invalid */
/* Opcode VEX.66.0F3A 0xda - invalid */
/* Opcode VEX.66.0F3A 0xdb - invalid */
/* Opcode VEX.66.0F3A 0xdc - invalid */
/* Opcode VEX.66.0F3A 0xdd - invalid */
/* Opcode VEX.66.0F3A 0xde - invalid */
/** Opcode VEX.66.0F3A 0xdf (vaeskeygenassist). */
FNIEMOP_STUB(iemOp_vaeskeygen_Vdq_Wdq_Ib);


/** Opcode VEX.F2.0F3A 0xf0 (vex only). */
FNIEMOP_DEF(iemOp_rorx_Gy_Ey_Ib)
{
    IEMOP_MNEMONIC3(VEX_RMI, RORX, rorx, Gy, Ey, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO | IEMOPHINT_VEX_V_ZERO);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi2)
        return iemOp_InvalidNeedRMImm8(pVCpu);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm8; IEM_OPCODE_GET_NEXT_U8(&bImm8);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint64_t *, pDst, 0);
            IEM_MC_ARG(uint64_t, uSrc1, 1);
            IEM_MC_ARG_CONST(uint64_t, uSrc2, bImm8, 2);
            IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_rorx_u64, pDst, uSrc1, uSrc2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint32_t *, pDst, 0);
            IEM_MC_ARG(uint32_t, uSrc1, 1);
            IEM_MC_ARG_CONST(uint32_t, uSrc2, bImm8, 2);
            IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_rorx_u32, pDst, uSrc1, uSrc2);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pDst); /* 32-bit result zero-extends into the full 64-bit GPR. */
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
        {
            IEM_MC_BEGIN(3, 1);
            IEM_MC_ARG(uint64_t *, pDst, 0);
            IEM_MC_ARG(uint64_t, uSrc1, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm8; IEM_OPCODE_GET_NEXT_U8(&bImm8);
            IEM_MC_ARG_CONST(uint64_t, uSrc2, bImm8, 2);
            IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
            IEM_MC_FETCH_MEM_U64(uSrc1, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_rorx_u64, pDst, uSrc1, uSrc2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 1);
            IEM_MC_ARG(uint32_t *, pDst, 0);
            IEM_MC_ARG(uint32_t, uSrc1, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm8; IEM_OPCODE_GET_NEXT_U8(&bImm8);
            IEM_MC_ARG_CONST(uint32_t, uSrc2, bImm8, 2);
            IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
            IEM_MC_FETCH_MEM_U32(uSrc1, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_rorx_u32, pDst, uSrc1, uSrc2);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pDst); /* 32-bit result zero-extends into the full 64-bit GPR. */
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}

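/* Note: RORX (BMI2) rotates without touching RFLAGS, which is why the
   iemAImpl_rorx_* workers above take no EFLAGS argument. */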

/**
 * VEX opcode map \#3.
 *
 * @sa g_apfnThreeByte0f3a
 */
IEM_STATIC const PFNIEMOP g_apfnVexMap3[] =
{
    /* no prefix, 066h prefix, f3h prefix, f2h prefix */
    /* 0x00 */ iemOp_InvalidNeedRMImm8, iemOp_vpermq_Vqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x01 */ iemOp_InvalidNeedRMImm8, iemOp_vpermpd_Vqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x02 */ iemOp_InvalidNeedRMImm8, iemOp_vpblendd_Vx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x04 */ iemOp_InvalidNeedRMImm8, iemOp_vpermilps_Vx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x05 */ iemOp_InvalidNeedRMImm8, iemOp_vpermilpd_Vx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x06 */ iemOp_InvalidNeedRMImm8, iemOp_vperm2f128_Vqq_Hqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x08 */ iemOp_InvalidNeedRMImm8, iemOp_vroundps_Vx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x09 */ iemOp_InvalidNeedRMImm8, iemOp_vroundpd_Vx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x0a */ iemOp_InvalidNeedRMImm8, iemOp_vroundss_Vss_Wss_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x0b */ iemOp_InvalidNeedRMImm8, iemOp_vroundsd_Vsd_Wsd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x0c */ iemOp_InvalidNeedRMImm8, iemOp_vblendps_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x0d */ iemOp_InvalidNeedRMImm8, iemOp_vblendpd_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x0e */ iemOp_InvalidNeedRMImm8, iemOp_vpblendw_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x0f */ iemOp_InvalidNeedRMImm8, iemOp_vpalignr_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,

    /* 0x10 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x11 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x12 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x13 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x14 */ iemOp_InvalidNeedRMImm8, iemOp_vpextrb_RdMb_Vdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x15 */ iemOp_InvalidNeedRMImm8, iemOp_vpextrw_RdMw_Vdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x16 */ iemOp_InvalidNeedRMImm8, iemOp_vpextrd_q_RdMw_Vdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x17 */ iemOp_InvalidNeedRMImm8, iemOp_vextractps_Ed_Vdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x18 */ iemOp_InvalidNeedRMImm8, iemOp_vinsertf128_Vqq_Hqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x19 */ iemOp_InvalidNeedRMImm8, iemOp_vextractf128_Wdq_Vqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x1d */ iemOp_InvalidNeedRMImm8, iemOp_vcvtps2ph_Wx_Vx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),

    /* 0x20 */ iemOp_InvalidNeedRMImm8, iemOp_vpinsrb_Vdq_Hdq_RyMb_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x21 */ iemOp_InvalidNeedRMImm8, iemOp_vinsertps_Vdq_Hdq_UdqMd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x22 */ iemOp_InvalidNeedRMImm8, iemOp_vpinsrd_q_Vdq_Hdq_Ey_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x28 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x29 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x2a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x2b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x2c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x2d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x2e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x2f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),

    /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x38 */ iemOp_InvalidNeedRMImm8, iemOp_vinserti128_Vqq_Hqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x39 */ iemOp_InvalidNeedRMImm8, iemOp_vextracti128_Wdq_Vqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),

    /* 0x40 */ iemOp_InvalidNeedRMImm8, iemOp_vdpps_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x41 */ iemOp_InvalidNeedRMImm8, iemOp_vdppd_Vdq_Hdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x42 */ iemOp_InvalidNeedRMImm8, iemOp_vmpsadbw_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x44 */ iemOp_InvalidNeedRMImm8, iemOp_vpclmulqdq_Vdq_Hdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x46 */ iemOp_InvalidNeedRMImm8, iemOp_vperm2i128_Vqq_Hqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x48 */ iemOp_InvalidNeedRMImm8, iemOp_vpermil2ps_Vx_Hx_Wp_Lx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x49 */ iemOp_InvalidNeedRMImm8, iemOp_vpermil2pd_Vx_Hx_Wp_Lx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x4a */ iemOp_InvalidNeedRMImm8, iemOp_vblendvps_Vx_Hx_Wx_Lx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x4b */ iemOp_InvalidNeedRMImm8, iemOp_vblendvpd_Vx_Hx_Wx_Lx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x4c */ iemOp_InvalidNeedRMImm8, iemOp_vpblendvb_Vx_Hx_Wx_Lx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),

    /* 0x50 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x51 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x52 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x53 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x54 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x55 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x56 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x57 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x58 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x59 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x5a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x5b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x5c */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddsubps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x5d */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddsubpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x5e */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubaddps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x5f */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubaddpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,

    /* 0x60 */ iemOp_InvalidNeedRMImm8, iemOp_vpcmpestrm_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x61 */ iemOp_InvalidNeedRMImm8, iemOp_vpcmpestri_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x62 */ iemOp_InvalidNeedRMImm8, iemOp_vpcmpistrm_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x63 */ iemOp_InvalidNeedRMImm8, iemOp_vpcmpistri_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x64 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x65 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x66 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x67 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x68 */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x69 */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x6a */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddss_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x6b */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddsd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x6c */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x6d */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x6e */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubss_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x6f */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubsd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,

    /* 0x70 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x71 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x72 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x73 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x74 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x75 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x76 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x77 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x78 */ iemOp_InvalidNeedRMImm8, iemOp_vfnmaddps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x79 */ iemOp_InvalidNeedRMImm8, iemOp_vfnmaddpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x7a */ iemOp_InvalidNeedRMImm8, iemOp_vfnmaddss_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x7b */ iemOp_InvalidNeedRMImm8, iemOp_vfnmaddsd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x7c */ iemOp_InvalidNeedRMImm8, iemOp_vfnmsubps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x7d */ iemOp_InvalidNeedRMImm8, iemOp_vfnmsubpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x7e */ iemOp_InvalidNeedRMImm8, iemOp_vfnmsubss_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0x7f */ iemOp_InvalidNeedRMImm8, iemOp_vfnmsubsd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,

    /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),

    /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),

    /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xae */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRMImm8),

    /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRMImm8),

    /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xc2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xc4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xc5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xc6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xcc */ iemOp_vsha1rnds4_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
    /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRMImm8),

    /* 0xd0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xd1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xd2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xd3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xd4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xd5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xd6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xd7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xd8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xd9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xda */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xdb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xdc */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xdd */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xde */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xdf */ iemOp_InvalidNeedRMImm8, iemOp_vaeskeygen_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,

    /* 0xe0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xe1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xe2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xe3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xe4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xe5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xe6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xe7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xe8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xe9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xea */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xeb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xec */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xed */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xee */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xef */ IEMOP_X4(iemOp_InvalidNeedRMImm8),

    /* 0xf0 */ iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8, iemOp_rorx_Gy_Ey_Ib,
    /* 0xf1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xf2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xf3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xf4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xf5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xf6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xf7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xf8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xf9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xfa */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xfb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xfc */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xfd */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xfe */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xff */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
};
AssertCompile(RT_ELEMENTS(g_apfnVexMap3) == 1024);
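/* The table is laid out as 256 opcode rows by four prefix columns (none,
   066h, 0f3h, 0f2h), so a dispatcher would presumably index it along the
   lines of:
        PFNIEMOP pfn = g_apfnVexMap3[(uintptr_t)bOpcode * 4 + idxPrefix];
   The actual lookup lives in the IEM decoder, not in this file. */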

/** @} */