VirtualBox source browser: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstVexMap3.cpp.h @ r106723 (last change r106061: copyright year updates by scm)
/* $Id: IEMAllInstVexMap3.cpp.h 106061 2024-09-16 14:03:52Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation, 0x0f 0x3a map.
 *
 * @remarks IEMAllInstThree0f3a.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2024 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/** @name VEX Opcode Map 3
 * @{
 */

/**
 * Common worker for AVX2 instructions on the forms:
 *     - vpxxx    xmm0, xmm1, xmm2/mem128, imm8
 *     - vpxxx    ymm0, ymm1, ymm2/mem256, imm8
 *
 * Takes function table for function w/o implicit state parameter.
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Ib_Opt, PCIEMOPMEDIAOPTF3IMM8, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, bImmArg);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
            IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, bImmArg);
            IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, bImmArg);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, bImmArg);
            IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
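
/*
 * Editor's note (not part of the original source): the "..._Opt" worker above
 * is table-driven so one decoder body can serve several opcodes.  A hedged
 * sketch of the idea follows; the struct and function names are hypothetical
 * stand-ins, only the pfnU128/pfnU256 roles mirror the members dereferenced
 * above, and the body shows vpblendw-style word blending as one concrete
 * example of such a worker.
 */
#if 0 /* illustrative sketch only */
typedef struct EXAMPLEMEDIAOPTF3IMM8
{
    void (*pfnU128)(RTUINT128U *puDst, RTUINT128U const *puSrc1, RTUINT128U const *puSrc2, uint8_t bImm);
    void (*pfnU256)(RTUINT256U *puDst, RTUINT256U const *puSrc1, RTUINT256U const *puSrc2, uint8_t bImm);
} EXAMPLEMEDIAOPTF3IMM8;

static void exampleBlendWU128(RTUINT128U *puDst, RTUINT128U const *puSrc1, RTUINT128U const *puSrc2, uint8_t bImm)
{
    for (unsigned i = 0; i < 8; i++) /* immediate bit i selects word i from the second source */
        puDst->au16[i] = (bImm & (1 << i)) ? puSrc2->au16[i] : puSrc1->au16[i];
}
#endif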


/**
 * Common worker for AVX instructions on the forms:
 *     - vxxxp{s,d}    xmm0, xmm1/mem128, imm8
 *     - vxxxp{s,d}    ymm0, ymm1/mem256, imm8
 *
 * Exceptions type 4. AVX cpuid check for both 128-bit and 256-bit operation.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx_Vx_Wx_Ib, PCIEMOPMEDIAF2IMM8, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_LOCAL(X86YMMREG, uDst);
            IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
            IEM_MC_LOCAL(X86YMMREG, uSrc);
            IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc, uSrc, 1);
            IEM_MC_FETCH_YREG_YMM(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc, bImmArg);
            IEM_MC_STORE_YREG_YMM_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_LOCAL(X86XMMREG, uDst);
            IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
            IEM_MC_ARG(PCX86XMMREG, puSrc, 1);
            IEM_MC_REF_XREG_XMM_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc, bImmArg);
            IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_LOCAL(X86YMMREG, uDst);
            IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
            IEM_MC_LOCAL(X86YMMREG, uSrc);
            IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc, uSrc, 1);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
            IEM_MC_FETCH_MEM_YMM_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc, bImmArg);
            IEM_MC_STORE_YREG_YMM_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_LOCAL(X86XMMREG, uDst);
            IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
            IEM_MC_LOCAL(X86XMMREG, uSrc);
            IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc, uSrc, 1);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
            IEM_MC_FETCH_MEM_XMM_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc, bImmArg);
            IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
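
/*
 * Editor's note (not part of the original source): this worker dispatches via
 * IEM_MC_CALL_AVX_AIMPL_3 rather than IEM_MC_CALL_VOID_AIMPL_* because
 * floating-point workers additionally need the guest SIMD FP state (MXCSR
 * rounding mode and status flags).  A hedged sketch of the two worker shapes;
 * both typedef names and the exact parameter lists are hypothetical:
 */
#if 0 /* illustrative sketch only */
/* Dispatched via IEM_MC_CALL_VOID_AIMPL_4: a pure value transform. */
typedef void FNEXAMPLEINTWORKER(RTUINT128U *puDst, RTUINT128U const *puSrc1,
                                RTUINT128U const *puSrc2, uint8_t bImm);
/* Dispatched via IEM_MC_CALL_AVX_AIMPL_3: also sees the guest FPU/SSE state
   block, so it can honour MXCSR.RC and accumulate MXCSR exception flags. */
typedef void FNEXAMPLEFPWORKER(PX86FXSTATE pFpuState, X86XMMREG *puDst,
                               X86XMMREG const *puSrc, uint8_t bImm);
#endif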


/**
 * Common worker for AVX instructions on the forms:
 *     - vpermilps/d    xmm0, xmm1/mem128, imm8
 *     - vpermilps/d    ymm0, ymm1/mem256, imm8
 *
 * Takes function table for function w/o implicit state parameter.
 *
 * Exceptions type 4. AVX cpuid check for both 128-bit and 256-bit operation.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx_Vx_Wx_Ib_Opt, PCIEMOPMEDIAOPTF2IMM8, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc, bImmArg);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc, bImmArg);
            IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc, bImmArg);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc, bImmArg);
            IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}


/**
 * Common worker for AVX instructions on the forms:
 *     - vblendps/d    xmm0, xmm1, xmm2/mem128, imm8
 *     - vblendps/d    ymm0, ymm1, ymm2/mem256, imm8
 *
 * Takes function table for function w/o implicit state parameter.
 *
 * Exceptions type 4. AVX cpuid check for both 128-bit and 256-bit operation.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Ib_Opt, PCIEMOPMEDIAOPTF3IMM8, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, bImmArg);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
            IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, bImmArg);
            IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, bImmArg);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, bImmArg);
            IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}


/** Opcode VEX.66.0F3A 0x00. */
FNIEMOP_DEF(iemOp_vpermq_Vqq_Wqq_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(fAvx2);
        IEM_MC_LOCAL(RTUINT256U, uDst);
        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();
        IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpermq_u256, iemAImpl_vpermq_u256_fallback),
                                 puDst, puSrc, bImmArg);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(fAvx2);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
        IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_LOCAL(RTUINT256U, uDst);
        IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpermq_u256, iemAImpl_vpermq_u256_fallback),
                                 puDst, puSrc, bImmArg);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
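
/*
 * Editor's note (not part of the original source): a minimal sketch of the
 * data movement vpermq/vpermpd perform, per the Intel SDM: destination
 * qword i is source qword (imm8 >> (2*i)) & 3.  The function name is
 * hypothetical; it is not the iemAImpl_vpermq_u256 worker itself.
 */
#if 0 /* illustrative sketch only */
static void examplePermQU256(RTUINT256U *puDst, RTUINT256U const *puSrc, uint8_t bImm)
{
    RTUINT256U const uSrc = *puSrc; /* copy first: the destination may alias the source */
    for (unsigned i = 0; i < 4; i++)
        puDst->au64[i] = uSrc.au64[(bImm >> (2 * i)) & 3];
}
#endif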


/** Opcode VEX.66.0F3A 0x01. */
FNIEMOP_DEF(iemOp_vpermpd_Vqq_Wqq_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(fAvx2);
        IEM_MC_LOCAL(RTUINT256U, uDst);
        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();
        IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpermpd_u256, iemAImpl_vpermpd_u256_fallback),
                                 puDst, puSrc, bImmArg);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(fAvx2);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
        IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_LOCAL(RTUINT256U, uDst);
        IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpermpd_u256, iemAImpl_vpermpd_u256_fallback),
                                 puDst, puSrc, bImmArg);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode VEX.66.0F3A 0x02.
 * AVX2,AVX2 */
FNIEMOP_DEF(iemOp_vpblendd_Vx_Hx_Wx_Ib)
{
    IEMOP_MNEMONIC4(VEX_RVMI, VPBLENDD, vpblendd, Vx_WO, Hx, Wx, Ib, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3IMM8_INIT_VARS(vpblendd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
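
/*
 * Editor's note (not part of the original source): vpblendd semantics per the
 * Intel SDM, as a hedged sketch with a hypothetical helper name: immediate
 * bit i selects dword i from the second source.  The 128-bit form only uses
 * imm8[3:0].
 */
#if 0 /* illustrative sketch only */
static void examplePBlendDU256(RTUINT256U *puDst, RTUINT256U const *puSrc1,
                               RTUINT256U const *puSrc2, uint8_t bImm)
{
    for (unsigned i = 0; i < 8; i++)
        puDst->au32[i] = (bImm & (1 << i)) ? puSrc2->au32[i] : puSrc1->au32[i];
}
#endif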


/* Opcode VEX.66.0F3A 0x03 - invalid */


/** Opcode VEX.66.0F3A 0x04.
 * AVX,AVX */
FNIEMOP_DEF(iemOp_vpermilps_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(VEX_RMI, VPERMILPS, vpermilps, Vx_WO, Wx, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_V_ZERO);
    IEMOPMEDIAOPTF2IMM8_INIT_VARS(vpermilps);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F3A 0x05.
 * AVX,AVX */
FNIEMOP_DEF(iemOp_vpermilpd_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(VEX_RMI, VPERMILPD, vpermilpd, Vx_WO, Wx, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_V_ZERO);
    IEMOPMEDIAOPTF2IMM8_INIT_VARS(vpermilpd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
}
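
/*
 * Editor's note (not part of the original source): the immediate form of
 * vpermilps selects, per the Intel SDM, each destination dword from within
 * the same 128-bit lane of the source.  Hedged 128-bit sketch, hypothetical
 * name:
 */
#if 0 /* illustrative sketch only */
static void examplePermilPsU128(RTUINT128U *puDst, RTUINT128U const *puSrc, uint8_t bImm)
{
    RTUINT128U const uSrc = *puSrc; /* the destination may alias the source */
    for (unsigned i = 0; i < 4; i++)
        puDst->au32[i] = uSrc.au32[(bImm >> (2 * i)) & 3];
    /* The 256-bit form repeats this selection independently in each lane;
       vpermilpd uses one immediate bit per qword instead. */
}
#endif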


/** Opcode VEX.66.0F3A 0x06 (vex only) */
FNIEMOP_DEF(iemOp_vperm2f128_Vqq_Hqq_Wqq_Ib)
{
    IEMOP_MNEMONIC4(VEX_RVMI, VPERM2F128, vperm2f128, Vqq_WO, Hqq, Wqq, Ib, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx);
        IEM_MC_LOCAL(RTUINT256U, uDst);
        IEM_MC_LOCAL(RTUINT256U, uSrc1);
        IEM_MC_LOCAL(RTUINT256U, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, bImm, 3);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();
        IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vperm2f128_u256, iemAImpl_vperm2f128_u256_fallback),
                                 puDst, puSrc1, puSrc2, bImmArg);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT256U, uDst);
        IEM_MC_LOCAL(RTUINT256U, uSrc1);
        IEM_MC_LOCAL(RTUINT256U, uSrc2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, bImm, 3);
        IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vperm2f128_u256, iemAImpl_vperm2f128_u256_fallback),
                                 puDst, puSrc1, puSrc2, bImmArg);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
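
/*
 * Editor's note (not part of the original source): the vperm2f128 immediate,
 * per the Intel SDM, picks each destination 128-bit lane from the four
 * source lanes, with bits 3/7 forcing a zero lane instead.  Hedged sketch,
 * hypothetical name:
 */
#if 0 /* illustrative sketch only */
static void examplePerm2F128(RTUINT256U *puDst, RTUINT256U const *puSrc1,
                             RTUINT256U const *puSrc2, uint8_t bImm)
{
    RTUINT256U const uSrc1 = *puSrc1, uSrc2 = *puSrc2; /* copy: dst may alias a source */
    uint64_t const *apauLanes[4] = { &uSrc1.au64[0], &uSrc1.au64[2], &uSrc2.au64[0], &uSrc2.au64[2] };
    for (unsigned iLane = 0; iLane < 2; iLane++)
    {
        uint8_t const   bSel   = (uint8_t)(bImm >> (4 * iLane)); /* imm[3:0] low lane, imm[7:4] high lane */
        uint64_t const *pauSrc = apauLanes[bSel & 3];
        puDst->au64[iLane * 2]     = (bSel & 8) ? 0 : pauSrc[0]; /* bit 3 of the nibble zeroes the lane */
        puDst->au64[iLane * 2 + 1] = (bSel & 8) ? 0 : pauSrc[1];
    }
}
#endif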


/* Opcode VEX.66.0F3A 0x07 - invalid */


/** Opcode VEX.66.0F3A 0x08. */
FNIEMOP_DEF(iemOp_vroundps_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(VEX_RMI, VROUNDPS, vroundps, Vx_WO, Wx, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_V_ZERO);
    IEMOPMEDIAF2IMM8_INIT_VARS( vroundps);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Wx_Ib, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F3A 0x09. */
FNIEMOP_DEF(iemOp_vroundpd_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(VEX_RMI, VROUNDPD, vroundpd, Vx_WO, Wx, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_V_ZERO);
    IEMOPMEDIAF2IMM8_INIT_VARS( vroundpd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Wx_Ib, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F3A 0x0a. */
FNIEMOP_DEF(iemOp_vroundss_Vss_Wss_Ib)
{
    IEMOP_MNEMONIC4(VEX_RVMI, VROUNDSS, vroundss, Vps, Hps, Wss, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM32, XMM32.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
        IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_EFFECTIVE_VVVV(pVCpu), IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_LOCAL(X86XMMREG, uDst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vroundss_u128, iemAImpl_vroundss_u128_fallback),
                                puDst, pSrc, bImmArg);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM32, [mem32].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_FETCH_MEM_XMM_U32_AND_XREG_XMM(Src, IEM_GET_EFFECTIVE_VVVV(pVCpu),
                                              0 /*a_iDword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_LOCAL(X86XMMREG, uDst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
        IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vroundss_u128, iemAImpl_vroundss_u128_fallback),
                                puDst, pSrc, bImmArg);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
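
/*
 * Editor's note (not part of the original source): how the vround* imm8
 * rounding control is interpreted, per the Intel SDM.  Hedged sketch with a
 * hypothetical helper name; only the bit layout is architectural fact:
 */
#if 0 /* illustrative sketch only */
static unsigned exampleRoundCtrlToMode(uint8_t bImm, uint32_t fMxCsr)
{
    if (bImm & 0x04)                /* imm8[2] (RS): take the mode from MXCSR.RC instead */
        return (fMxCsr >> 13) & 3;  /* MXCSR.RC lives in bits 14:13 */
    return bImm & 0x03;             /* 0=nearest, 1=down, 2=up, 3=truncate */
    /* imm8[3], when set, additionally suppresses the precision exception. */
}
#endif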


/** Opcode VEX.66.0F3A 0x0b. */
FNIEMOP_DEF(iemOp_vroundsd_Vsd_Wsd_Ib)
{
    IEMOP_MNEMONIC4(VEX_RVMI, VROUNDSD, vroundsd, Vpd, Hpd, Wsd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM64, XMM64.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
        IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_EFFECTIVE_VVVV(pVCpu), IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_LOCAL(X86XMMREG, uDst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vroundsd_u128, iemAImpl_vroundsd_u128_fallback),
                                puDst, pSrc, bImmArg);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM64, [mem64].
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_FETCH_MEM_XMM_U64_AND_XREG_XMM(Src, IEM_GET_EFFECTIVE_VVVV(pVCpu),
                                              0 /*a_iQword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_LOCAL(X86XMMREG, uDst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
        IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vroundsd_u128, iemAImpl_vroundsd_u128_fallback),
                                puDst, pSrc, bImmArg);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode VEX.66.0F3A 0x0c.
 * AVX,AVX */
FNIEMOP_DEF(iemOp_vblendps_Vx_Hx_Wx_Ib)
{
    IEMOP_MNEMONIC4(VEX_RVMI, VBLENDPS, vblendps, Vx_WO, Hx, Wx, Ib, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3IMM8_INIT_VARS(vblendps);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F3A 0x0d.
 * AVX,AVX */
FNIEMOP_DEF(iemOp_vblendpd_Vx_Hx_Wx_Ib)
{
    IEMOP_MNEMONIC4(VEX_RVMI, VBLENDPD, vblendpd, Vx_WO, Hx, Wx, Ib, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3IMM8_INIT_VARS(vblendpd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F3A 0x0e.
 * AVX,AVX2 */
FNIEMOP_DEF(iemOp_vpblendw_Vx_Hx_Wx_Ib)
{
    IEMOP_MNEMONIC4(VEX_RVMI, VPBLENDW, vpblendw, Vx_WO, Hx, Wx, Ib, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3IMM8_INIT_VARS(vpblendw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.0F3A 0x0f - invalid. */


/** Opcode VEX.66.0F3A 0x0f.
 * AVX,AVX2 */
FNIEMOP_DEF(iemOp_vpalignr_Vx_Hx_Wx_Ib)
{
    IEMOP_MNEMONIC4(VEX_RVMI, VPALIGNR, vpalignr, Vx_WO, Hx, Wx, Ib, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3IMM8_INIT_VARS(vpalignr);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
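
/*
 * Editor's note (not part of the original source): vpalignr concatenates
 * src1:src2 (first source in the high half) and shifts right by imm8 bytes,
 * per 128-bit lane.  Hedged 128-bit sketch, hypothetical name; assumes
 * memcpy is available in this translation unit:
 */
#if 0 /* illustrative sketch only */
static void examplePAlignRU128(RTUINT128U *puDst, RTUINT128U const *puSrc1,
                               RTUINT128U const *puSrc2, uint8_t bImm)
{
    uint8_t abTmp[32];
    memcpy(&abTmp[0],  puSrc2->au8, 16);   /* second source is least significant */
    memcpy(&abTmp[16], puSrc1->au8, 16);
    for (unsigned i = 0; i < 16; i++)
        puDst->au8[i] = (unsigned)bImm + i < 32 ? abTmp[bImm + i] : 0; /* shifting past the end yields zero */
    /* The 256-bit form repeats this independently in each 128-bit lane. */
}
#endif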


/* Opcode VEX.66.0F3A 0x10 - invalid */
/* Opcode VEX.66.0F3A 0x11 - invalid */
/* Opcode VEX.66.0F3A 0x12 - invalid */
/* Opcode VEX.66.0F3A 0x13 - invalid */


/** Opcode VEX.66.0F3A 0x14 - vpextrb Eb, Vdq, Ib */
FNIEMOP_DEF(iemOp_vpextrb_Eb_Vdq_Ib)
{
    IEMOP_MNEMONIC3(VEX_MRI, VPEXTRB, vpextrb, Eb, Vdq, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO | IEMOPHINT_VEX_V_ZERO | IEMOPHINT_IGNORES_REXW);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * greg32, XMM, imm8.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_LOCAL(uint8_t, uValue);
        IEM_MC_FETCH_XREG_U8(uValue, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 15 /*a_iByte*/);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uValue);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem8], XMM, imm8.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);

        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_LOCAL(uint8_t, uValue);
        IEM_MC_FETCH_XREG_U8(uValue, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 15 /*a_iByte*/);
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uValue);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode VEX.66.0F3A 0x15 - vpextrw Ew, Vdq, Ib */
FNIEMOP_DEF(iemOp_vpextrw_Ew_Vdq_Ib)
{
    /** @todo testcase: check that this ignores VEX.W. */
    IEMOP_MNEMONIC3(VEX_MRI, VPEXTRW, vpextrw, Ew_WO, Vdq, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO | IEMOPHINT_VEX_V_ZERO | IEMOPHINT_IGNORES_REXW);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * greg32, XMM, imm8.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint16_t, uValue);

        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_XREG_U16(uValue, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uValue);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem16], XMM, imm8.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint16_t, uValue);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);

        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_XREG_U16(uValue, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uValue);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode VEX.66.0F3A 0x16 - vpextrd / vpextrq Eq / Ey, Vdq, Ib */
FNIEMOP_DEF(iemOp_vpextrd_q_Ey_Vdq_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        IEMOP_MNEMONIC3(VEX_MRI, VPEXTRQ, vpextrq, Eq_WO, Vdq, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO | IEMOPHINT_VEX_W_ONE);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * greg64, XMM, imm8.
             */
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(uint64_t, uValue);

            IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U64(uValue, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * [mem64], XMM, imm8.
             */
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(uint64_t, uValue);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);

            IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U64(uValue, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         */
        IEMOP_MNEMONIC3(VEX_MRI, VPEXTRD, vpextrd, Ey_WO, Vdq, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO | IEMOPHINT_VEX_W_ZERO);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * greg32, XMM, imm8.
             */
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(uint32_t, uValue);

            IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U32(uValue, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * [mem32], XMM, imm8.
             */
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(uint32_t, uValue);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);

            IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U32(uValue, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
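
/*
 * Editor's note (not part of the original source): the "bImm & 1" / "& 3" /
 * "& 7" / "& 15" masking above reflects that vpextrq/d/w/b index 2/4/8/16
 * elements respectively, so only the low immediate bits select an element.
 * Hedged sketch, hypothetical name:
 */
#if 0 /* illustrative sketch only */
static uint64_t examplePExtrQ(RTUINT128U const *puSrc, uint8_t bImm)
{
    return puSrc->au64[bImm & 1]; /* an XMM register holds exactly two qwords */
}
#endif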


/** Opcode VEX.66.0F3A 0x17. */
FNIEMOP_DEF(iemOp_vextractps_Ed_Vdq_Ib)
{
    //IEMOP_MNEMONIC3(VEX_MRI_REG, VEXTRACTPS, vextractps, Ed, Vdq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_VEX_L_ZERO);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * greg32, XMM, imm8.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint32_t, uSrc);

        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3 /*a_iDword*/);
        IEM_MC_STORE_GREG_U32( IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem32], XMM, imm8.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3 /*a_iDword*/);
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode VEX.66.0F3A 0x18 (vex only). */
FNIEMOP_DEF(iemOp_vinsertf128_Vqq_Hqq_Wqq_Ib)
{
    //IEMOP_MNEMONIC4(VEX_RMI, VINSERTF128, vinsertf128, Vx, Hx, Wx, Ib, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx);
        IEM_MC_LOCAL(RTUINT128U, uSrc);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_STORE_YREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_STORE_YREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
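
/*
 * Editor's note (not part of the original source): vinsertf128 copies the
 * VVVV operand and then overwrites the 128-bit lane selected by imm8[0],
 * which is exactly the COPY_YREG/STORE_YREG_U128 pair above.  Hedged sketch,
 * hypothetical name:
 */
#if 0 /* illustrative sketch only */
static void exampleInsert128(RTUINT256U *puDst, RTUINT256U const *puSrc1,
                             RTUINT128U const *puSrc2, uint8_t bImm)
{
    *puDst = *puSrc1;                                  /* start from the first source... */
    puDst->au64[(bImm & 1) * 2]     = puSrc2->au64[0]; /* ...then replace the lane imm8[0] selects */
    puDst->au64[(bImm & 1) * 2 + 1] = puSrc2->au64[1];
}
#endif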


/** Opcode VEX.66.0F3A 0x19 (vex only). */
FNIEMOP_DEF(iemOp_vextractf128_Wdq_Vqq_Ib)
{
    IEMOP_MNEMONIC3(VEX_MRI, VEXTRACTF128, vextractf128, Wdq, Vqq, Ib, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(fAvx);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_LOCAL(RTUINT128U, uDst);
        IEM_MC_FETCH_YREG_U128(uDst, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1);
        IEM_MC_STORE_XREG_U128( IEM_GET_MODRM_RM(pVCpu, bRm), uDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_LOCAL(RTUINT128U, uDst);
        IEM_MC_FETCH_YREG_U128(uDst, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1);
        IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/* Opcode VEX.66.0F3A 0x1a - invalid */
/* Opcode VEX.66.0F3A 0x1b - invalid */
/* Opcode VEX.66.0F3A 0x1c - invalid */
/** Opcode VEX.66.0F3A 0x1d (vex only). */
FNIEMOP_STUB(iemOp_vcvtps2ph_Wx_Vx_Ib);
/* Opcode VEX.66.0F3A 0x1e - invalid */
/* Opcode VEX.66.0F3A 0x1f - invalid */


/** Opcode VEX.66.0F3A 0x20. */
FNIEMOP_DEF(iemOp_vpinsrb_Vdq_Hdq_RyMb_Ib)
{
    /*IEMOP_MNEMONIC4(VEX_RMVI, VPINSRB, vpinsrb, Vdq, Hdq, Ey, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);*/ /** @todo */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc1);
        IEM_MC_LOCAL(uint8_t, uValue);

        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_FETCH_GREG_U8(uValue, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_XREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
        IEM_MC_STORE_XREG_U8( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 15, uValue);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_LOCAL(RTUINT128U, uSrc1);
        IEM_MC_LOCAL(uint8_t, uValue);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_FETCH_MEM_U8(uValue, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
        IEM_MC_STORE_XREG_U8( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 15, uValue);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode VEX.66.0F3A 0x21. */
FNIEMOP_DEF(iemOp_vinsertps_Vdq_Hdq_UdqMd_Ib)
{
    //IEMOP_MNEMONIC4(VEX_RVMR_REG, VINSERTPS, vinsertps, Vdq, Hdq, UdqMd, Ib, DISOPTYPE_X86_AVX, IEMOPHINT_VEX_L_ZERO); /// @todo
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM, XMM, imm8.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc1);
        IEM_MC_LOCAL(uint32_t, uSrc2);

        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_FETCH_XREG_U32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), (bImm >> 6) & 3);
        IEM_MC_STORE_XREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
        IEM_MC_STORE_XREG_U32( IEM_GET_MODRM_REG(pVCpu, bRm), (bImm >> 4) & 3, uSrc2);
        IEM_MC_CLEAR_XREG_U32_MASK( IEM_GET_MODRM_REG(pVCpu, bRm), bImm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, XMM, [mem32], imm8.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_LOCAL(RTUINT128U, uSrc1);
        IEM_MC_LOCAL(uint32_t, uSrc2);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_FETCH_MEM_U32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
        IEM_MC_STORE_XREG_U32( IEM_GET_MODRM_REG(pVCpu, bRm), (bImm >> 4) & 3, uSrc2);
        IEM_MC_CLEAR_XREG_U32_MASK( IEM_GET_MODRM_REG(pVCpu, bRm), bImm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
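
/*
 * Editor's note (not part of the original source): the vinsertps immediate
 * fields used above, per the Intel SDM: imm8[7:6] picks the source dword
 * (register form only), imm8[5:4] the destination slot, and imm8[3:0] is a
 * zero mask, hence the (bImm >> 6) & 3, (bImm >> 4) & 3 and
 * CLEAR_XREG_U32_MASK steps.  Hedged register-form sketch, hypothetical name:
 */
#if 0 /* illustrative sketch only */
static void exampleInsertPs(RTUINT128U *puDst, RTUINT128U const *puSrc1,
                            RTUINT128U const *puSrc2, uint8_t bImm)
{
    *puDst = *puSrc1;
    puDst->au32[(bImm >> 4) & 3] = puSrc2->au32[(bImm >> 6) & 3];
    for (unsigned i = 0; i < 4; i++)    /* apply the zero mask */
        if (bImm & (1 << i))
            puDst->au32[i] = 0;
    /* For the memory form the loaded 32 bits are the source and imm8[7:6] is ignored. */
}
#endif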


/** Opcode VEX.66.0F3A 0x22. */
FNIEMOP_DEF(iemOp_vpinsrd_q_Vdq_Hdq_Ey_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /*IEMOP_MNEMONIC4(VEX_RMVI, VPINSRQ, vpinsrq, Vdq, Hdq, Ey, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);*/ /** @todo */
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * Register, register.
             */
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U, uSrc1);
            IEM_MC_LOCAL(uint64_t, uValue);

            IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_GREG_U64(uValue, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_XREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
            IEM_MC_STORE_XREG_U64( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1, uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * Register, memory.
             */
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(RTUINT128U, uSrc1);
            IEM_MC_LOCAL(uint64_t, uValue);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_MEM_U64(uValue, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
            IEM_MC_STORE_XREG_U64( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1, uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*IEMOP_MNEMONIC4(VEX_RMVI, VPINSRD, vpinsrd, Vdq, Hdq, Ey, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);*/ /** @todo */
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * Register, register.
             */
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_LOCAL(RTUINT128U, uSrc1);
            IEM_MC_LOCAL(uint32_t, uValue);

            IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_GREG_U32(uValue, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_XREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
            IEM_MC_STORE_XREG_U32( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3, uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * Register, memory.
             */
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(RTUINT128U, uSrc1);
            IEM_MC_LOCAL(uint32_t, uValue);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_MEM_U32(uValue, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
            IEM_MC_STORE_XREG_U32( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3, uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
1414
1415
1416/* Opcode VEX.66.0F3A 0x23 - invalid */
1417/* Opcode VEX.66.0F3A 0x24 - invalid */
1418/* Opcode VEX.66.0F3A 0x25 - invalid */
1419/* Opcode VEX.66.0F3A 0x26 - invalid */
1420/* Opcode VEX.66.0F3A 0x27 - invalid */
1421/* Opcode VEX.66.0F3A 0x28 - invalid */
1422/* Opcode VEX.66.0F3A 0x29 - invalid */
1423/* Opcode VEX.66.0F3A 0x2a - invalid */
1424/* Opcode VEX.66.0F3A 0x2b - invalid */
1425/* Opcode VEX.66.0F3A 0x2c - invalid */
1426/* Opcode VEX.66.0F3A 0x2d - invalid */
1427/* Opcode VEX.66.0F3A 0x2e - invalid */
1428/* Opcode VEX.66.0F3A 0x2f - invalid */
1429
1430
1431/* Opcode VEX.66.0F3A 0x30 - invalid */
1432/* Opcode VEX.66.0F3A 0x31 - invalid */
1433/* Opcode VEX.66.0F3A 0x32 - invalid */
1434/* Opcode VEX.66.0F3A 0x33 - invalid */
1435/* Opcode VEX.66.0F3A 0x34 - invalid */
1436/* Opcode VEX.66.0F3A 0x35 - invalid */
1437/* Opcode VEX.66.0F3A 0x36 - invalid */
1438/* Opcode VEX.66.0F3A 0x37 - invalid */
1439
1440
1441/** Opcode VEX.66.0F3A 0x38 (vex only). */
1442FNIEMOP_DEF(iemOp_vinserti128_Vqq_Hqq_Wqq_Ib)
1443{
1444 //IEMOP_MNEMONIC4(VEX_RMI, VINSERTI128, vinserti128, Vx, Hx, Wx, Ib, DISOPTYPE_HARMLESS, 0);
1445 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1446 if (IEM_IS_MODRM_REG_MODE(bRm))
1447 {
1448 /*
1449 * Register, register.
1450 */
1451 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1452 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
1453 IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
1454 IEM_MC_LOCAL(RTUINT128U, uSrc);
1455
1456 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1457 IEM_MC_PREPARE_AVX_USAGE();
1458
1459 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1460 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_EFFECTIVE_VVVV(pVCpu));
1461 IEM_MC_STORE_YREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1, uSrc);
1462
1463 IEM_MC_ADVANCE_RIP_AND_FINISH();
1464 IEM_MC_END();
1465 }
1466 else
1467 {
1468 /*
1469 * Register, memory.
1470 */
1471 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1472 IEM_MC_LOCAL(RTUINT128U, uSrc);
1473 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1474
1475 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1476 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
1477 IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
1478 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1479 IEM_MC_PREPARE_AVX_USAGE();
1480
1481 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1482 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_EFFECTIVE_VVVV(pVCpu));
1483 IEM_MC_STORE_YREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1, uSrc);
1484
1485 IEM_MC_ADVANCE_RIP_AND_FINISH();
1486 IEM_MC_END();
1487 }
1488}
1489
1490
1491/** Opcode VEX.66.0F3A 0x39 (vex only). */
1492FNIEMOP_DEF(iemOp_vextracti128_Wdq_Vqq_Ib)
1493{
1494 IEMOP_MNEMONIC3(VEX_MRI, VEXTRACTI128, vextracti128, Wdq, Vqq, Ib, DISOPTYPE_HARMLESS, 0);
1495 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1496 if (IEM_IS_MODRM_REG_MODE(bRm))
1497 {
1498 /*
1499 * Register, register.
1500 */
1501 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1502 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
1503 IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(fAvx2);
1504
1505 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1506 IEM_MC_PREPARE_AVX_USAGE();
1507
1508 IEM_MC_LOCAL(RTUINT128U, uDst);
1509 IEM_MC_FETCH_YREG_U128(uDst, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1);
1510 IEM_MC_STORE_XREG_U128( IEM_GET_MODRM_RM(pVCpu, bRm), uDst);
1511
1512 IEM_MC_ADVANCE_RIP_AND_FINISH();
1513 IEM_MC_END();
1514 }
1515 else
1516 {
1517 /*
1518 * Register, memory.
1519 */
1520 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1521 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1522
1523 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1524 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
1525 IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(fAvx2);
1526 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1527 IEM_MC_PREPARE_AVX_USAGE();
1528
1529 IEM_MC_LOCAL(RTUINT128U, uDst);
1530 IEM_MC_FETCH_YREG_U128(uDst, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1);
1531 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uDst);
1532
1533 IEM_MC_ADVANCE_RIP_AND_FINISH();
1534 IEM_MC_END();
1535 }
1536}
1537
1538
1539/* Opcode VEX.66.0F3A 0x3a - invalid */
1540/* Opcode VEX.66.0F3A 0x3b - invalid */
1541/* Opcode VEX.66.0F3A 0x3c - invalid */
1542/* Opcode VEX.66.0F3A 0x3d - invalid */
1543/* Opcode VEX.66.0F3A 0x3e - invalid */
1544/* Opcode VEX.66.0F3A 0x3f - invalid */
1545
1546
1547/** Opcode VEX.66.0F3A 0x40. */
1548FNIEMOP_DEF(iemOp_vdpps_Vx_Hx_Wx_Ib)
1549{
1550 IEMOP_MNEMONIC4(VEX_RVMI, VDPPS, vdpps, Vps, Hps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
1551 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1552 if (IEM_IS_MODRM_REG_MODE(bRm))
1553 {
1554 /*
1555 * Register, Register
1556 */
1557 if (pVCpu->iem.s.uVexLength)
1558 {
1559 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1560 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
1561 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
1562 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1563 IEM_MC_PREPARE_AVX_USAGE();
1564 IEM_MC_LOCAL(IEMMEDIAF2YMMSRC, uSrc);
1565 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2YMMSRC, puSrc, uSrc, 1);
1566 IEM_MC_FETCH_YREG_PAIR_YMM(uSrc, IEM_GET_EFFECTIVE_VVVV(pVCpu), IEM_GET_MODRM_RM(pVCpu, bRm));
1567 IEM_MC_LOCAL(X86YMMREG, uDst);
1568 IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
1569 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
1570 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vdpps_u256, iemAImpl_vdpps_u256_fallback),
1571 puDst, puSrc, bImmArg);
1572 IEM_MC_STORE_YREG_YMM_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
1573 IEM_MC_ADVANCE_RIP_AND_FINISH();
1574 IEM_MC_END();
1575 }
1576 else
1577 {
1578 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1579 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
1580 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
1581 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1582 IEM_MC_PREPARE_AVX_USAGE();
1583 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc);
1584 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1);
1585 IEM_MC_FETCH_XREG_PAIR_XMM(uSrc, IEM_GET_EFFECTIVE_VVVV(pVCpu), IEM_GET_MODRM_RM(pVCpu, bRm));
1586 IEM_MC_LOCAL(X86XMMREG, uDst);
1587 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
1588 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
1589 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vdpps_u128, iemAImpl_vdpps_u128_fallback),
1590 puDst, puSrc, bImmArg);
1591 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
1592 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
1593 IEM_MC_ADVANCE_RIP_AND_FINISH();
1594 IEM_MC_END();
1595 }
1596 }
1597 else
1598 {
1599 /*
1600 * Register, Memory.
1601 */
1602 if (pVCpu->iem.s.uVexLength)
1603 {
1604 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1605 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1606 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
1607 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
1608 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
1609 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1610 IEM_MC_PREPARE_AVX_USAGE();
1611 IEM_MC_LOCAL(IEMMEDIAF2YMMSRC, uSrc);
1612 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2YMMSRC, puSrc, uSrc, 1);
1613 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
1614 IEM_MC_FETCH_MEM_YMM_ALIGN_AVX_AND_YREG_YMM(uSrc, IEM_GET_EFFECTIVE_VVVV(pVCpu),
1615 pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1616 IEM_MC_LOCAL(X86YMMREG, uDst);
1617 IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
1618 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vdpps_u256, iemAImpl_vdpps_u256_fallback),
1619 puDst, puSrc, bImmArg);
1620 IEM_MC_STORE_YREG_YMM_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
1621 IEM_MC_ADVANCE_RIP_AND_FINISH();
1622 IEM_MC_END();
1623 }
1624 else
1625 {
1626 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1627 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1628 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
1629 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
1630 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
1631 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1632 IEM_MC_PREPARE_AVX_USAGE();
1633 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc);
1634 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1);
1635 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
1636 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE_AND_XREG_XMM(uSrc, IEM_GET_EFFECTIVE_VVVV(pVCpu),
1637 pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1638 IEM_MC_LOCAL(X86XMMREG, uDst);
1639 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
1640 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vdpps_u128, iemAImpl_vdpps_u128_fallback),
1641 puDst, puSrc, bImmArg);
1642 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
1643 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
1644 IEM_MC_ADVANCE_RIP_AND_FINISH();
1645 IEM_MC_END();
1646 }
1647 }
1648}
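
/*
 * A minimal standalone sketch of the per-lane DPPS step the dispatch above
 * ends up in (following the SDM description and ignoring MXCSR/exception
 * details; this is not the iemAImpl_vdpps_u128 implementation).  The high
 * immediate nibble selects which products enter the sum, the low nibble
 * selects which destination elements receive the result; the 256-bit form
 * repeats this independently for each 128-bit lane.
 */
static void vdppsSketch128(float afDst[4], float const afSrc1[4], float const afSrc2[4], uint8_t bImm)
{
    float fSum = 0.0f;
    for (unsigned i = 0; i < 4; i++)
        if (bImm & (0x10 << i))                         /* input mask, imm[7:4] */
            fSum += afSrc1[i] * afSrc2[i];
    for (unsigned i = 0; i < 4; i++)
        afDst[i] = (bImm & (1 << i)) ? fSum : 0.0f;     /* broadcast mask, imm[3:0] */
}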
1649
1650
1651/** Opcode VEX.66.0F3A 0x41. */
1652FNIEMOP_DEF(iemOp_vdppd_Vdq_Hdq_Wdq_Ib)
1653{
1654 IEMOP_MNEMONIC4(VEX_RVMI, VDPPD, vdppd, Vpd, Hpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_VEX_L_ZERO);
1655 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1656 if (IEM_IS_MODRM_REG_MODE(bRm))
1657 {
1658 /*
1659 * Register, Register
1660 */
1661 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1662 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
1663 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
1664 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1665 IEM_MC_PREPARE_AVX_USAGE();
1666 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc);
1667 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1);
1668 IEM_MC_FETCH_XREG_PAIR_XMM(uSrc, IEM_GET_EFFECTIVE_VVVV(pVCpu), IEM_GET_MODRM_RM(pVCpu, bRm));
1669 IEM_MC_LOCAL(X86XMMREG, uDst);
1670 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
1671 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
1672 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vdppd_u128, iemAImpl_vdppd_u128_fallback),
1673 puDst, puSrc, bImmArg);
1674 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
1675 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
1676 IEM_MC_ADVANCE_RIP_AND_FINISH();
1677 IEM_MC_END();
1678 }
1679 else
1680 {
1681 /*
1682 * Register, Memory.
1683 */
1684 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1685 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1686 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
1687 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
1688 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
1689 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1690 IEM_MC_PREPARE_AVX_USAGE();
1691 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc);
1692 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1);
1693 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
1694 IEM_MC_FETCH_MEM_XMM_ALIGN_SSE_AND_XREG_XMM(uSrc, IEM_GET_EFFECTIVE_VVVV(pVCpu),
1695 pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1696 IEM_MC_LOCAL(X86XMMREG, uDst);
1697 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
1698 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vdppd_u128, iemAImpl_vdppd_u128_fallback),
1699 puDst, puSrc, bImmArg);
1700 IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
1701 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
1702 IEM_MC_ADVANCE_RIP_AND_FINISH();
1703 IEM_MC_END();
1704 }
1705}
1706
1707
1708/** Opcode VEX.66.0F3A 0x42. */
1709FNIEMOP_DEF(iemOp_vmpsadbw_Vx_Hx_Wx_Ib)
1710{
1711 IEMOP_MNEMONIC4(VEX_RVMI, VMPSADBW, vmpsadbw, Vx_WO, Hx, Wx, Ib, DISOPTYPE_HARMLESS, 0);
1712 IEMOPMEDIAOPTF3IMM8_INIT_VARS(vmpsadbw);
1713 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1714}
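
/*
 * A minimal standalone sketch of the 128-bit MPSADBW operation behind the
 * table dispatch above (SDM description, not the iemAImpl code): imm[1:0]
 * picks a 4-byte block of the second source, imm[2] offsets the first source
 * by 0 or 4 bytes, and each of the eight result words is the sum of absolute
 * differences of a sliding 4-byte window against that block.  For the VEX.256
 * form the upper lane is controlled by imm[5:3] in the same way.
 */
static void vmpsadbwSketch128(uint16_t auDst[8], uint8_t const abSrc1[16], uint8_t const abSrc2[16], uint8_t bImm)
{
    unsigned const offSrc2 = (bImm & 3) * 4;    /* imm[1:0]: 4-byte block of src2 */
    unsigned const offSrc1 = bImm & 4;          /* imm[2]:   src1 window base, 0 or 4 */
    for (unsigned i = 0; i < 8; i++)
    {
        unsigned uSad = 0;
        for (unsigned j = 0; j < 4; j++)
        {
            int const iDiff = (int)abSrc1[offSrc1 + i + j] - (int)abSrc2[offSrc2 + j];
            uSad += iDiff < 0 ? (unsigned)-iDiff : (unsigned)iDiff;
        }
        auDst[i] = (uint16_t)uSad;
    }
}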
1715
1716
1717/* Opcode VEX.66.0F3A 0x43 - invalid */
1718
1719
1720/** Opcode VEX.66.0F3A 0x44. */
1721FNIEMOP_DEF(iemOp_vpclmulqdq_Vdq_Hdq_Wdq_Ib)
1722{
1723 IEMOP_MNEMONIC4(VEX_RVMI, VPCLMULQDQ, vpclmulqdq, Vdq_WO, Hdq, Wdq, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
1724 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1725 if (IEM_IS_MODRM_REG_MODE(bRm))
1726 {
1727 /*
1728 * Register, register.
1729 */
1730 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1731 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
1732 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fPclMul);
1733 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1734 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
1735 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
1736 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
1737 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1738 IEM_MC_PREPARE_AVX_USAGE();
1739 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1740 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1741 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
1742 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fPclMul, iemAImpl_vpclmulqdq_u128, iemAImpl_vpclmulqdq_u128_fallback),
1743 puDst, puSrc1, puSrc2, bImmArg);
1744 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
1745 IEM_MC_ADVANCE_RIP_AND_FINISH();
1746 IEM_MC_END();
1747 }
1748 else
1749 {
1750 /*
1751 * Register, memory.
1752 */
1753 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1754 IEM_MC_LOCAL(RTUINT128U, uSrc2);
1755 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1756 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1757 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
1758 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
1759
1760 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
1761 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
1762 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
1763 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fPclMul);
1764 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1765 IEM_MC_PREPARE_AVX_USAGE();
1766
1767 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1768 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1769 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1770 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fPclMul, iemAImpl_vpclmulqdq_u128, iemAImpl_vpclmulqdq_u128_fallback),
1771 puDst, puSrc1, puSrc2, bImmArg);
1772 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
1773
1774 IEM_MC_ADVANCE_RIP_AND_FINISH();
1775 IEM_MC_END();
1776 }
1777}
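
/*
 * A minimal standalone sketch of the carry-less multiplication performed by
 * the PCLMULQDQ implementations selected above (schoolbook shift-and-xor per
 * the SDM; the real iemAImpl code lives elsewhere): imm[0] and imm[4] select
 * one quadword from each source and the 128-bit GF(2) product is returned.
 */
static void vpclmulqdqSketch(uint64_t auDst[2], uint64_t const auSrc1[2], uint64_t const auSrc2[2], uint8_t bImm)
{
    uint64_t const uA = auSrc1[bImm & 1];           /* imm[0] selects the src1 qword */
    uint64_t const uB = auSrc2[(bImm >> 4) & 1];    /* imm[4] selects the src2 qword */
    uint64_t uLo = 0;
    uint64_t uHi = 0;
    for (unsigned i = 0; i < 64; i++)
        if (uB & RT_BIT_64(i))
        {
            uLo ^= uA << i;
            uHi ^= i ? uA >> (64 - i) : 0;
        }
    auDst[0] = uLo;
    auDst[1] = uHi;
}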
1778
1779
1780/* Opcode VEX.66.0F3A 0x45 - invalid */
1781
1782
1783/** Opcode VEX.66.0F3A 0x46 (vex only) */
1784FNIEMOP_DEF(iemOp_vperm2i128_Vqq_Hqq_Wqq_Ib)
1785{
1786 IEMOP_MNEMONIC4(VEX_RVMI, VPERM2I128, vperm2i128, Vqq_WO, Hqq, Wqq, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ONE);
1787 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1788 if (IEM_IS_MODRM_REG_MODE(bRm))
1789 {
1790 /*
1791 * Register, register.
1792 */
1793 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1794 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
1795 IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
1796 IEM_MC_LOCAL(RTUINT256U, uDst);
1797 IEM_MC_LOCAL(RTUINT256U, uSrc1);
1798 IEM_MC_LOCAL(RTUINT256U, uSrc2);
1799 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
1800 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
1801 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
1802 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
1803 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1804 IEM_MC_PREPARE_AVX_USAGE();
1805 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1806 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
1807 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vperm2i128_u256, iemAImpl_vperm2i128_u256_fallback),
1808 puDst, puSrc1, puSrc2, bImmArg);
1809 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
1810 IEM_MC_ADVANCE_RIP_AND_FINISH();
1811 IEM_MC_END();
1812 }
1813 else
1814 {
1815 /*
1816 * Register, memory.
1817 */
1818 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1819 IEM_MC_LOCAL(RTUINT256U, uDst);
1820 IEM_MC_LOCAL(RTUINT256U, uSrc1);
1821 IEM_MC_LOCAL(RTUINT256U, uSrc2);
1822 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1823 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
1824 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
1825 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
1826
1827 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
1828 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
1829 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
1830 IEMOP_HLP_DONE_VEX_DECODING_L1_EX(fAvx2);
1831 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1832 IEM_MC_PREPARE_AVX_USAGE();
1833
1834 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1835 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1836 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vperm2i128_u256, iemAImpl_vperm2i128_u256_fallback),
1837 puDst, puSrc1, puSrc2, bImmArg);
1838 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
1839
1840 IEM_MC_ADVANCE_RIP_AND_FINISH();
1841 IEM_MC_END();
1842 }
1843}
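
/*
 * A minimal standalone sketch of the VPERM2I128 lane selection implemented by
 * the worker called above (SDM semantics, illustrative representation): each
 * destination lane is picked by a control nibble - bits [1:0] choose among
 * src1.lo, src1.hi, src2.lo and src2.hi, while bit 3 zeroes the lane instead.
 */
static void vperm2i128Sketch(uint64_t auDst[4], uint64_t const auSrc1[4], uint64_t const auSrc2[4], uint8_t bImm)
{
    for (unsigned iLane = 0; iLane < 2; iLane++)
    {
        uint8_t const bCtrl = (uint8_t)(bImm >> (iLane * 4)); /* imm[3:0] low lane, imm[7:4] high lane */
        if (bCtrl & 8)                                        /* zeroing override */
        {
            auDst[iLane * 2 + 0] = 0;
            auDst[iLane * 2 + 1] = 0;
        }
        else
        {
            uint64_t const *pauSrc   = (bCtrl & 2) ? auSrc2 : auSrc1;
            unsigned const  iSrcLane = bCtrl & 1;
            auDst[iLane * 2 + 0] = pauSrc[iSrcLane * 2 + 0];
            auDst[iLane * 2 + 1] = pauSrc[iSrcLane * 2 + 1];
        }
    }
}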
1844
1845
1846/* Opcode VEX.66.0F3A 0x47 - invalid */
1847/** Opcode VEX.66.0F3A 0x48 (AMD tables only). */
1848FNIEMOP_STUB(iemOp_vpermil2ps_Vx_Hx_Wp_Lx);
1849/** Opcode VEX.66.0F3A 0x49 (AMD tables only). */
1850FNIEMOP_STUB(iemOp_vpermil2pd_Vx_Hx_Wp_Lx);
1851
1852
1853/**
1854 * Common worker for AVX instructions on the forms:
1855 * - vblendvps/d xmm0, xmm1, xmm2/mem128, xmm4
1856 * - vblendvps/d ymm0, ymm1, ymm2/mem256, ymm4
1857 *
1858 * Exceptions type 4. AVX cpuid check for both 128-bit and 256-bit operations.
1859 * Additionally, it triggers \#UD if VEX.W is 1.
1860 */
1861FNIEMOP_DEF_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Lx, PCIEMOPBLENDOP, pImpl)
1862{
1863 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1864 if (IEM_IS_MODRM_REG_MODE(bRm))
1865 {
1866 /*
1867 * Register, register.
1868 */
1869 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
1870 if (pVCpu->iem.s.uVexLength)
1871 {
1872 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1873 IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);
1874 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1875 IEM_MC_PREPARE_AVX_USAGE();
1876 IEM_MC_LOCAL(RTUINT256U, uDst);
1877 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
1878 IEM_MC_LOCAL(RTUINT256U, uSrc1);
1879 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
1880 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1881 IEM_MC_LOCAL(RTUINT256U, uSrc2);
1882 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
1883 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
1884 IEM_MC_LOCAL(RTUINT256U, uSrc3);
1885 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc3, uSrc3, 3);
1886 IEM_MC_FETCH_YREG_U256(uSrc3, IEM_GET_IMM8_REG(pVCpu, bOp4));
1887 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, puSrc3);
1888 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
1889 IEM_MC_ADVANCE_RIP_AND_FINISH();
1890 IEM_MC_END();
1891 }
1892 else
1893 {
1894 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1895 IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);
1896 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1897 IEM_MC_PREPARE_AVX_USAGE();
1898 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1899 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1900 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
1901 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1902 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
1903 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
1904 IEM_MC_ARG(PCRTUINT128U, puSrc3, 3);
1905 IEM_MC_REF_XREG_U128_CONST(puSrc3, IEM_GET_IMM8_REG(pVCpu, bOp4));
1906 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, puSrc3);
1907 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
1908 IEM_MC_ADVANCE_RIP_AND_FINISH();
1909 IEM_MC_END();
1910 }
1911 }
1912 else
1913 {
1914 /*
1915 * Register, memory.
1916 */
1917 if (pVCpu->iem.s.uVexLength)
1918 {
1919 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1920 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1921 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
1922 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
1923 IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);
1924 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1925 IEM_MC_PREPARE_AVX_USAGE();
1926
1927 IEM_MC_LOCAL(RTUINT256U, uSrc2);
1928 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
1929 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1930
1931 IEM_MC_LOCAL(RTUINT256U, uSrc1);
1932 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1933 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
1934 IEM_MC_LOCAL(RTUINT256U, uSrc3);
1935 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc3, uSrc3, 3);
1936 IEM_MC_FETCH_YREG_U256(uSrc3, IEM_GET_IMM8_REG(pVCpu, bOp4));
1937 IEM_MC_LOCAL(RTUINT256U, uDst);
1938 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
1939 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, puSrc3);
1940 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
1941
1942 IEM_MC_ADVANCE_RIP_AND_FINISH();
1943 IEM_MC_END();
1944 }
1945 else
1946 {
1947 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1948 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1949 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
1950 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
1951 IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);
1952 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1953 IEM_MC_PREPARE_AVX_USAGE();
1954
1955 IEM_MC_LOCAL(RTUINT128U, uSrc2);
1956 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
1957 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1958
1959 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1960 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1961 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
1962 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
1963 IEM_MC_ARG(PCRTUINT128U, puSrc3, 3);
1964 IEM_MC_REF_XREG_U128_CONST(puSrc3, IEM_GET_IMM8_REG(pVCpu, bOp4));
1965 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, puSrc3);
1966 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
1967
1968 IEM_MC_ADVANCE_RIP_AND_FINISH();
1969 IEM_MC_END();
1970 }
1971 }
1972}
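
/*
 * A minimal standalone sketch of the element selection VBLENDVPS performs per
 * dword (SDM semantics, not the pImpl code the worker above calls): the sign
 * bit of each mask element picks the second source, otherwise the first.
 * VBLENDVPD is the same with qwords and bit 63.
 */
static void vblendvpsSketch128(uint32_t auDst[4], uint32_t const auSrc1[4], uint32_t const auSrc2[4], uint32_t const auMask[4])
{
    for (unsigned i = 0; i < 4; i++)
        auDst[i] = (auMask[i] & RT_BIT_32(31)) ? auSrc2[i] : auSrc1[i];
}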
1973
1974
1975/** Opcode VEX.66.0F3A 0x4a (vex only).
1976 * AVX, AVX */
1977FNIEMOP_DEF(iemOp_vblendvps_Vx_Hx_Wx_Lx)
1978{
1979 IEMOP_MNEMONIC4(VEX_RVMR, VBLENDVPS, vblendvps, Vx, Hx, Wx, Lx, DISOPTYPE_HARMLESS, 0);
1980 IEMOPBLENDOP_INIT_VARS(vblendvps);
1981 return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Lx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
1982}
1983
1984
1985/** Opcode VEX.66.0F3A 0x4b (vex only).
1986 * AVX, AVX */
1987FNIEMOP_DEF(iemOp_vblendvpd_Vx_Hx_Wx_Lx)
1988{
1989 IEMOP_MNEMONIC4(VEX_RVMR, VBLENDVPD, vblendvpd, Vx, Hx, Wx, Lx, DISOPTYPE_HARMLESS, 0);
1990 IEMOPBLENDOP_INIT_VARS(vblendvpd);
1991 return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Lx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
1992}
1993
1994
1995/**
1996 * Common worker for AVX2 instructions on the forms:
1997 * - vpxxx xmm0, xmm1, xmm2/mem128, xmm4
1998 * - vpxxx ymm0, ymm1, ymm2/mem256, ymm4
1999 *
2000 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
2001 * Additionally, it triggers \#UD if VEX.W is 1.
2002 */
2003FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Lx, PCIEMOPBLENDOP, pImpl)
2004{
2005 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2006 if (IEM_IS_MODRM_REG_MODE(bRm))
2007 {
2008 /*
2009 * Register, register.
2010 */
2011 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
2012 if (pVCpu->iem.s.uVexLength)
2013 {
2014 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2015 IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx2);
2016 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2017 IEM_MC_PREPARE_AVX_USAGE();
2018
2019 IEM_MC_LOCAL(RTUINT256U, uSrc1);
2020 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
2021 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2022
2023 IEM_MC_LOCAL(RTUINT256U, uSrc2);
2024 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
2025 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2026
2027 IEM_MC_LOCAL(RTUINT256U, uSrc3);
2028 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc3, uSrc3, 3);
2029 IEM_MC_FETCH_YREG_U256(uSrc3, IEM_GET_IMM8_REG(pVCpu, bOp4));
2030
2031 IEM_MC_LOCAL(RTUINT256U, uDst);
2032 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
2033
2034 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, puSrc3);
2035
2036 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2037 IEM_MC_ADVANCE_RIP_AND_FINISH();
2038 IEM_MC_END();
2039 }
2040 else
2041 {
2042 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2043 IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);
2044 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2045 IEM_MC_PREPARE_AVX_USAGE();
2046 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2047 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2048 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
2049 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2050 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
2051 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2052 IEM_MC_ARG(PCRTUINT128U, puSrc3, 3);
2053 IEM_MC_REF_XREG_U128_CONST(puSrc3, IEM_GET_IMM8_REG(pVCpu, bOp4));
2054 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, puSrc3);
2055 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2056 IEM_MC_ADVANCE_RIP_AND_FINISH();
2057 IEM_MC_END();
2058 }
2059 }
2060 else
2061 {
2062 /*
2063 * Register, memory.
2064 */
2065 if (pVCpu->iem.s.uVexLength)
2066 {
2067 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2068 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2069
2070 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2071 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
2072
2073 IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx2);
2074 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2075 IEM_MC_PREPARE_AVX_USAGE();
2076
2077 IEM_MC_LOCAL(RTUINT256U, uSrc2);
2078 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
2079 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2080
2081 IEM_MC_LOCAL(RTUINT256U, uSrc1);
2082 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
2083 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2084
2085 IEM_MC_LOCAL(RTUINT256U, uSrc3);
2086 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc3, uSrc3, 3);
2087 IEM_MC_FETCH_YREG_U256(uSrc3, IEM_GET_IMM8_REG(pVCpu, bOp4));
2088
2089 IEM_MC_LOCAL(RTUINT256U, uDst);
2090 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
2091
2092 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, puSrc3);
2093
2094 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2095 IEM_MC_ADVANCE_RIP_AND_FINISH();
2096 IEM_MC_END();
2097 }
2098 else
2099 {
2100 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2101 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2102 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2103 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4);
2104
2105 IEMOP_HLP_DONE_VEX_DECODING_W0_EX(fAvx);
2106 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2107 IEM_MC_PREPARE_AVX_USAGE();
2108
2109 IEM_MC_LOCAL(RTUINT128U, uSrc2);
2110 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
2111 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2112
2113 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2114 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2115 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
2116 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2117 IEM_MC_ARG(PCRTUINT128U, puSrc3, 3);
2118 IEM_MC_REF_XREG_U128_CONST(puSrc3, IEM_GET_IMM8_REG(pVCpu, bOp4));
2119 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, puSrc3);
2120 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2121
2122 IEM_MC_ADVANCE_RIP_AND_FINISH();
2123 IEM_MC_END();
2124 }
2125 }
2126}
2127
2128
2129/** Opcode VEX.66.0F3A 0x4c (vex only).
2130 * AVX, AVX2 */
2131FNIEMOP_DEF(iemOp_vpblendvb_Vx_Hx_Wx_Lx)
2132{
2133 /** @todo testcase: cover VEX.W=1 and check that it triggers \#UD on both real
2134 * and emulated hardware. */
2135 IEMOP_MNEMONIC4(VEX_RVMR, VPBLENDVB, vpblendvb, Vx_WO, Hx, Wx, Lx, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_W_ZERO);
2136 IEMOPBLENDOP_INIT_VARS(vpblendvb);
2137 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Lx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2138}
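
/*
 * A minimal standalone sketch of the byte-granular blend VPBLENDVB performs
 * (SDM semantics, illustrative only): the top bit of each mask byte selects
 * between the two sources, exactly like the float/double variants above but
 * per byte.
 */
static void vpblendvbSketch128(uint8_t abDst[16], uint8_t const abSrc1[16], uint8_t const abSrc2[16], uint8_t const abMask[16])
{
    for (unsigned i = 0; i < 16; i++)
        abDst[i] = (abMask[i] & 0x80) ? abSrc2[i] : abSrc1[i];
}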
2139
2140
2141/* Opcode VEX.66.0F3A 0x4d - invalid */
2142/* Opcode VEX.66.0F3A 0x4e - invalid */
2143/* Opcode VEX.66.0F3A 0x4f - invalid */
2144
2145
2146/* Opcode VEX.66.0F3A 0x50 - invalid */
2147/* Opcode VEX.66.0F3A 0x51 - invalid */
2148/* Opcode VEX.66.0F3A 0x52 - invalid */
2149/* Opcode VEX.66.0F3A 0x53 - invalid */
2150/* Opcode VEX.66.0F3A 0x54 - invalid */
2151/* Opcode VEX.66.0F3A 0x55 - invalid */
2152/* Opcode VEX.66.0F3A 0x56 - invalid */
2153/* Opcode VEX.66.0F3A 0x57 - invalid */
2154/* Opcode VEX.66.0F3A 0x58 - invalid */
2155/* Opcode VEX.66.0F3A 0x59 - invalid */
2156/* Opcode VEX.66.0F3A 0x5a - invalid */
2157/* Opcode VEX.66.0F3A 0x5b - invalid */
2158/** Opcode VEX.66.0F3A 0x5c (AMD tables only). */
2159FNIEMOP_STUB(iemOp_vfmaddsubps_Vx_Lx_Wx_Hx);
2160/** Opcode VEX.66.0F3A 0x5d (AMD tables only). */
2161FNIEMOP_STUB(iemOp_vfmaddsubpd_Vx_Lx_Wx_Hx);
2162/** Opcode VEX.66.0F3A 0x5e (AMD tables only). */
2163FNIEMOP_STUB(iemOp_vfmsubaddps_Vx_Lx_Wx_Hx);
2164/** Opcode VEX.66.0F3A 0x5f (AMD tables only). */
2165FNIEMOP_STUB(iemOp_vfmsubaddpd_Vx_Lx_Wx_Hx);
2166
2167
2168/**
2169 * @opcode 0x60
2170 * @oppfx 0x66
2171 * @opflmodify cf,pf,af,zf,sf,of
2172 * @opflclear pf,af
2173 */
2174FNIEMOP_DEF(iemOp_vpcmpestrm_Vdq_Wdq_Ib)
2175{
2176 IEMOP_MNEMONIC3(VEX_RMI, VPCMPESTRM, vpcmpestrm, Vdq, Wdq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2177
2178 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2179 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2180 {
2181 if (IEM_IS_MODRM_REG_MODE(bRm))
2182 {
2183 /*
2184 * Register, register.
2185 */
2186 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
2187 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2188 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2189 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2190 IEM_MC_ARG(uint32_t *, pEFlags, 1);
2191 IEM_MC_LOCAL(IEMPCMPESTRXSRC, Src);
2192 IEM_MC_ARG_LOCAL_REF(PIEMPCMPESTRXSRC, pSrc, Src, 2);
2193 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
2194 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2195 IEM_MC_PREPARE_SSE_USAGE();
2196 IEM_MC_FETCH_XREG_PAIR_U128_AND_RAX_RDX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
2197 IEM_MC_REF_XREG_U128(puDst, 0 /*xmm0*/);
2198 IEM_MC_REF_EFLAGS(pEFlags);
2199 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
2200 iemAImpl_vpcmpestrm_u128,
2201 iemAImpl_vpcmpestrm_u128_fallback),
2202 puDst, pEFlags, pSrc, bImmArg);
2203 IEM_MC_ADVANCE_RIP_AND_FINISH();
2204 IEM_MC_END();
2205 }
2206 else
2207 {
2208 /*
2209 * Register, memory.
2210 */
2211 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2212 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2213 IEM_MC_ARG(uint32_t *, pEFlags, 1);
2214 IEM_MC_LOCAL(IEMPCMPESTRXSRC, Src);
2215 IEM_MC_ARG_LOCAL_REF(PIEMPCMPESTRXSRC, pSrc, Src, 2);
2216 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2217
2218 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2219 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
2220 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
2221 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2222 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2223 IEM_MC_PREPARE_SSE_USAGE();
2224
2225 IEM_MC_FETCH_MEM_U128_AND_XREG_U128_AND_RAX_RDX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
2226 pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2227 IEM_MC_REF_XREG_U128(puDst, 0 /*xmm0*/);
2228 IEM_MC_REF_EFLAGS(pEFlags);
2229 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
2230 iemAImpl_vpcmpestrm_u128,
2231 iemAImpl_vpcmpestrm_u128_fallback),
2232 puDst, pEFlags, pSrc, bImmArg);
2233 IEM_MC_ADVANCE_RIP_AND_FINISH();
2234 IEM_MC_END();
2235 }
2236 }
2237 else
2238 {
2239 if (IEM_IS_MODRM_REG_MODE(bRm))
2240 {
2241 /*
2242 * Register, register.
2243 */
2244 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
2245 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2246 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2247 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2248 IEM_MC_ARG(uint32_t *, pEFlags, 1);
2249 IEM_MC_LOCAL(IEMPCMPESTRXSRC, Src);
2250 IEM_MC_ARG_LOCAL_REF(PIEMPCMPESTRXSRC, pSrc, Src, 2);
2251 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
2252 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2253 IEM_MC_PREPARE_SSE_USAGE();
2254 IEM_MC_FETCH_XREG_PAIR_U128_AND_EAX_EDX_U32_SX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
2255 IEM_MC_REF_XREG_U128(puDst, 0 /*xmm0*/);
2256 IEM_MC_REF_EFLAGS(pEFlags);
2257 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
2258 iemAImpl_vpcmpestrm_u128,
2259 iemAImpl_vpcmpestrm_u128_fallback),
2260 puDst, pEFlags, pSrc, bImmArg);
2261 IEM_MC_ADVANCE_RIP_AND_FINISH();
2262 IEM_MC_END();
2263 }
2264 else
2265 {
2266 /*
2267 * Register, memory.
2268 */
2269 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2270 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2271 IEM_MC_ARG(uint32_t *, pEFlags, 1);
2272 IEM_MC_LOCAL(IEMPCMPESTRXSRC, Src);
2273 IEM_MC_ARG_LOCAL_REF(PIEMPCMPESTRXSRC, pSrc, Src, 2);
2274 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2275
2276 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2277 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
2278 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
2279 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2280 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2281 IEM_MC_PREPARE_SSE_USAGE();
2282
2283 IEM_MC_FETCH_MEM_U128_AND_XREG_U128_AND_EAX_EDX_U32_SX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
2284 pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2285 IEM_MC_REF_XREG_U128(puDst, 0 /*xmm0*/);
2286 IEM_MC_REF_EFLAGS(pEFlags);
2287 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
2288 iemAImpl_vpcmpestrm_u128,
2289 iemAImpl_vpcmpestrm_u128_fallback),
2290 puDst, pEFlags, pSrc, bImmArg);
2291 IEM_MC_ADVANCE_RIP_AND_FINISH();
2292 IEM_MC_END();
2293 }
2294 }
2295}
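
/*
 * A minimal standalone sketch of how the PCMPxSTRM family turns the
 * aggregated match mask (what the SDM calls IntRes2, produced inside the
 * iemAImpl worker, not shown here) into the XMM0 result: imm[6] chooses
 * between a zero-extended bit mask and a mask expanded to full elements.
 * cElems is 16 for byte data and 8 for word data.
 */
static void vpcmpxstrmMaskSketch(uint8_t abXmm0[16], uint16_t fIntRes2, unsigned cElems, uint8_t bImm)
{
    unsigned const cbElem = 16 / cElems;
    if (bImm & 0x40)                                    /* imm[6]: expanded element mask */
        for (unsigned i = 0; i < cElems; i++)
            for (unsigned j = 0; j < cbElem; j++)
                abXmm0[i * cbElem + j] = (fIntRes2 & (1u << i)) ? 0xff : 0x00;
    else                                                /* bit mask, zero-extended to 128 bits */
    {
        for (unsigned i = 2; i < 16; i++)
            abXmm0[i] = 0;
        abXmm0[0] = (uint8_t)fIntRes2;
        abXmm0[1] = (uint8_t)(fIntRes2 >> 8);
    }
}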
2296
2297
2298/**
2299 * @opcode 0x61
2300 * @oppfx 0x66
2301 * @opflmodify cf,pf,af,zf,sf,of
2302 * @opflclear pf,af
2303 */
2304FNIEMOP_DEF(iemOp_vpcmpestri_Vdq_Wdq_Ib)
2305{
2306 IEMOP_MNEMONIC3(VEX_RMI, VPCMPESTRI, vpcmpestri, Vdq, Wdq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2307
2308 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2309 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2310 {
2311 if (IEM_IS_MODRM_REG_MODE(bRm))
2312 {
2313 /*
2314 * Register, register.
2315 */
2316 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
2317 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2318 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2319 IEM_MC_ARG(uint32_t *, pu32Ecx, 0);
2320 IEM_MC_ARG(uint32_t *, pEFlags, 1);
2321 IEM_MC_LOCAL(IEMPCMPESTRXSRC, Src);
2322 IEM_MC_ARG_LOCAL_REF(PIEMPCMPESTRXSRC, pSrc, Src, 2);
2323 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
2324 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2325 IEM_MC_PREPARE_SSE_USAGE();
2326 IEM_MC_FETCH_XREG_PAIR_U128_AND_RAX_RDX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
2327 IEM_MC_REF_GREG_U32(pu32Ecx, X86_GREG_xCX);
2328 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xCX);
2329 IEM_MC_REF_EFLAGS(pEFlags);
2330 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
2331 iemAImpl_vpcmpestri_u128,
2332 iemAImpl_vpcmpestri_u128_fallback),
2333 pu32Ecx, pEFlags, pSrc, bImmArg);
2334 /** @todo testcase: High dword of RCX cleared? */
2335 IEM_MC_ADVANCE_RIP_AND_FINISH();
2336 IEM_MC_END();
2337 }
2338 else
2339 {
2340 /*
2341 * Register, memory.
2342 */
2343 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2344 IEM_MC_ARG(uint32_t *, pu32Ecx, 0);
2345 IEM_MC_ARG(uint32_t *, pEFlags, 1);
2346 IEM_MC_LOCAL(IEMPCMPESTRXSRC, Src);
2347 IEM_MC_ARG_LOCAL_REF(PIEMPCMPESTRXSRC, pSrc, Src, 2);
2348 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2349
2350 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2351 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
2352 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
2353 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2354 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2355 IEM_MC_PREPARE_SSE_USAGE();
2356
2357 IEM_MC_FETCH_MEM_U128_AND_XREG_U128_AND_RAX_RDX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
2358 pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2359 IEM_MC_REF_GREG_U32(pu32Ecx, X86_GREG_xCX);
2360 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xCX);
2361 IEM_MC_REF_EFLAGS(pEFlags);
2362 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
2363 iemAImpl_vpcmpestri_u128,
2364 iemAImpl_vpcmpestri_u128_fallback),
2365 pu32Ecx, pEFlags, pSrc, bImmArg);
2366 /** @todo testcase: High dword of RCX cleared? */
2367 IEM_MC_ADVANCE_RIP_AND_FINISH();
2368 IEM_MC_END();
2369 }
2370 }
2371 else
2372 {
2373 if (IEM_IS_MODRM_REG_MODE(bRm))
2374 {
2375 /*
2376 * Register, register.
2377 */
2378 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
2379 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2380 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2381 IEM_MC_ARG(uint32_t *, pu32Ecx, 0);
2382 IEM_MC_ARG(uint32_t *, pEFlags, 1);
2383 IEM_MC_LOCAL(IEMPCMPESTRXSRC, Src);
2384 IEM_MC_ARG_LOCAL_REF(PIEMPCMPESTRXSRC, pSrc, Src, 2);
2385 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
2386 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2387 IEM_MC_PREPARE_SSE_USAGE();
2388 IEM_MC_FETCH_XREG_PAIR_U128_AND_EAX_EDX_U32_SX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
2389 IEM_MC_REF_GREG_U32(pu32Ecx, X86_GREG_xCX);
2390 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xCX);
2391 IEM_MC_REF_EFLAGS(pEFlags);
2392 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
2393 iemAImpl_vpcmpestri_u128,
2394 iemAImpl_vpcmpestri_u128_fallback),
2395 pu32Ecx, pEFlags, pSrc, bImmArg);
2396 /** @todo testcase: High dword of RCX cleared? */
2397 IEM_MC_ADVANCE_RIP_AND_FINISH();
2398 IEM_MC_END();
2399 }
2400 else
2401 {
2402 /*
2403 * Register, memory.
2404 */
2405 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2406 IEM_MC_ARG(uint32_t *, pu32Ecx, 0);
2407 IEM_MC_ARG(uint32_t *, pEFlags, 1);
2408 IEM_MC_LOCAL(IEMPCMPESTRXSRC, Src);
2409 IEM_MC_ARG_LOCAL_REF(PIEMPCMPESTRXSRC, pSrc, Src, 2);
2410 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2411
2412 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2413 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
2414 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
2415 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2416 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2417 IEM_MC_PREPARE_SSE_USAGE();
2418
2419 IEM_MC_FETCH_MEM_U128_AND_XREG_U128_AND_EAX_EDX_U32_SX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
2420 pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2421 IEM_MC_REF_GREG_U32(pu32Ecx, X86_GREG_xCX);
2422 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xCX);
2423 IEM_MC_REF_EFLAGS(pEFlags);
2424 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
2425 iemAImpl_vpcmpestri_u128,
2426 iemAImpl_vpcmpestri_u128_fallback),
2427 pu32Ecx, pEFlags, pSrc, bImmArg);
2428 /** @todo testcase: High dword of RCX cleared? */
2429 IEM_MC_ADVANCE_RIP_AND_FINISH();
2430 IEM_MC_END();
2431 }
2432 }
2433}
2434
2435
2436/**
2437 * @opcode 0x62
2438 * @oppfx 0x66
2439 * @opflmodify cf,pf,af,zf,sf,of
2440 * @opflclear pf,af
2441 */
2442FNIEMOP_DEF(iemOp_vpcmpistrm_Vdq_Wdq_Ib)
2443{
2444 IEMOP_MNEMONIC3(VEX_RMI, VPCMPISTRM, vpcmpistrm, Vdq, Wdq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2445
2446 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2447 if (IEM_IS_MODRM_REG_MODE(bRm))
2448 {
2449 /*
2450 * Register, register.
2451 */
2452 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
2453 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2454 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2455 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2456 IEM_MC_ARG(uint32_t *, pEFlags, 1);
2457 IEM_MC_LOCAL(IEMPCMPISTRXSRC, Src);
2458 IEM_MC_ARG_LOCAL_REF(PIEMPCMPISTRXSRC, pSrc, Src, 2);
2459 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
2460 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2461 IEM_MC_PREPARE_SSE_USAGE();
2462 IEM_MC_FETCH_XREG_PAIR_U128(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
2463 IEM_MC_REF_XREG_U128(puDst, 0 /*xmm0*/);
2464 IEM_MC_REF_EFLAGS(pEFlags);
2465 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
2466 iemAImpl_vpcmpistrm_u128,
2467 iemAImpl_vpcmpistrm_u128_fallback),
2468 puDst, pEFlags, pSrc, bImmArg);
2469 IEM_MC_ADVANCE_RIP_AND_FINISH();
2470 IEM_MC_END();
2471 }
2472 else
2473 {
2474 /*
2475 * Register, memory.
2476 */
2477 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2478 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2479 IEM_MC_ARG(uint32_t *, pEFlags, 1);
2480 IEM_MC_LOCAL(IEMPCMPISTRXSRC, Src);
2481 IEM_MC_ARG_LOCAL_REF(PIEMPCMPISTRXSRC, pSrc, Src, 2);
2482 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2483
2484 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2485 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
2486 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
2487 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2488 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2489 IEM_MC_PREPARE_SSE_USAGE();
2490
2491 IEM_MC_FETCH_MEM_U128_AND_XREG_U128(Src, IEM_GET_MODRM_REG(pVCpu, bRm), pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2492 IEM_MC_REF_XREG_U128(puDst, 0 /*xmm0*/);
2493 IEM_MC_REF_EFLAGS(pEFlags);
2494 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
2495 iemAImpl_vpcmpistrm_u128,
2496 iemAImpl_vpcmpistrm_u128_fallback),
2497 puDst, pEFlags, pSrc, bImmArg);
2498 IEM_MC_ADVANCE_RIP_AND_FINISH();
2499 IEM_MC_END();
2500 }
2501}
2502
2503
2504/**
2505 * @opcode 0x63
2506 * @oppfx 0x66
2507 * @opflmodify cf,pf,af,zf,sf,of
2508 * @opflclear pf,af
2509 */
2510FNIEMOP_DEF(iemOp_vpcmpistri_Vdq_Wdq_Ib)
2511{
2512 IEMOP_MNEMONIC3(VEX_RMI, VPCMPISTRI, vpcmpistri, Vdq, Wdq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2513
2514 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2515 if (IEM_IS_MODRM_REG_MODE(bRm))
2516 {
2517 /*
2518 * Register, register.
2519 */
2520 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
2521 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2522 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2523 IEM_MC_ARG(uint32_t *, pEFlags, 0);
2524 IEM_MC_ARG(PCRTUINT128U, pSrc1, 1);
2525 IEM_MC_ARG(PCRTUINT128U, pSrc2, 2);
2526 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
2527 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2528 IEM_MC_PREPARE_SSE_USAGE();
2529 IEM_MC_REF_XREG_U128_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2530 IEM_MC_REF_XREG_U128_CONST(pSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2531 IEM_MC_REF_EFLAGS(pEFlags);
2532 IEM_MC_CALL_AIMPL_4(uint32_t, u32Ecx,
2533 IEM_SELECT_HOST_OR_FALLBACK(fAvx,
2534 iemAImpl_vpcmpistri_u128,
2535 iemAImpl_vpcmpistri_u128_fallback),
2536 pEFlags, pSrc1, pSrc2, bImmArg);
2537 /** @todo testcase: High dword of RCX cleared? */
2538 IEM_MC_STORE_GREG_U32(X86_GREG_xCX, u32Ecx);
2539 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xCX);
2540
2541 IEM_MC_ADVANCE_RIP_AND_FINISH();
2542 IEM_MC_END();
2543 }
2544 else
2545 {
2546 /*
2547 * Register, memory.
2548 */
2549 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2550 IEM_MC_ARG(uint32_t *, pEFlags, 0);
2551 IEM_MC_ARG(PCRTUINT128U, pSrc1, 1);
2552 IEM_MC_LOCAL(RTUINT128U, Src2);
2553 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc2, Src2, 2);
2554 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2555
2556 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2557 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
2558 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
2559 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2560 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
2561 IEM_MC_PREPARE_SSE_USAGE();
2562
2563 IEM_MC_FETCH_MEM_U128(Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2564 IEM_MC_REF_XREG_U128_CONST(pSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2565 IEM_MC_REF_EFLAGS(pEFlags);
2566 IEM_MC_CALL_AIMPL_4(uint32_t, u32Ecx,
2567 IEM_SELECT_HOST_OR_FALLBACK(fAvx,
2568 iemAImpl_vpcmpistri_u128,
2569 iemAImpl_vpcmpistri_u128_fallback),
2570 pEFlags, pSrc1, pSrc2, bImmArg);
2571 /** @todo testcase: High dword of RCX cleared? */
2572 IEM_MC_STORE_GREG_U32(X86_GREG_xCX, u32Ecx);
2573 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xCX);
2574 IEM_MC_ADVANCE_RIP_AND_FINISH();
2575 IEM_MC_END();
2576 }
2577}
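
/*
 * A minimal standalone sketch of how the PCMPxSTRI family derives the ECX
 * result from the aggregated match mask (the SDM's IntRes2, computed inside
 * the iemAImpl worker): imm[6] selects the most instead of the least
 * significant matching element, and no match yields the element count.
 */
static uint32_t vpcmpxstriIndexSketch(uint16_t fIntRes2, unsigned cElems, uint8_t bImm)
{
    if (!fIntRes2)
        return cElems;              /* no match: ECX = number of elements */
    unsigned iBit = 0;
    if (bImm & 0x40)                /* imm[6]: most significant match */
        while (fIntRes2 >>= 1)
            iBit++;
    else                            /* least significant match */
        while (!(fIntRes2 & (1u << iBit)))
            iBit++;
    return iBit;
}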
2578
2579
2580/* Opcode VEX.66.0F3A 0x64 - invalid */
2581/* Opcode VEX.66.0F3A 0x65 - invalid */
2582/* Opcode VEX.66.0F3A 0x66 - invalid */
2583/* Opcode VEX.66.0F3A 0x67 - invalid */
2584/** Opcode VEX.66.0F3A 0x68 (AMD tables only). */
2585FNIEMOP_STUB(iemOp_vfmaddps_Vx_Lx_Wx_Hx);
2586/** Opcode VEX.66.0F3A 0x69 (AMD tables only). */
2587FNIEMOP_STUB(iemOp_vfmaddpd_Vx_Lx_Wx_Hx);
2588/** Opcode VEX.66.0F3A 0x6a (AMD tables only). */
2589FNIEMOP_STUB(iemOp_vfmaddss_Vx_Lx_Wx_Hx);
2590/** Opcode VEX.66.0F3A 0x6b (AMD tables only). */
2591FNIEMOP_STUB(iemOp_vfmaddsd_Vx_Lx_Wx_Hx);
2592/** Opcode VEX.66.0F3A 0x6c (AMD tables only). */
2593FNIEMOP_STUB(iemOp_vfmsubps_Vx_Lx_Wx_Hx);
2594/** Opcode VEX.66.0F3A 0x6d (AMD tables only). */
2595FNIEMOP_STUB(iemOp_vfmsubpd_Vx_Lx_Wx_Hx);
2596/** Opcode VEX.66.0F3A 0x6e (AMD tables only). */
2597FNIEMOP_STUB(iemOp_vfmsubss_Vx_Lx_Wx_Hx);
2598/** Opcode VEX.66.0F3A 0x6f (AMD tables only). */
2599FNIEMOP_STUB(iemOp_vfmsubsd_Vx_Lx_Wx_Hx);
2600
2601/* Opcode VEX.66.0F3A 0x70 - invalid */
2602/* Opcode VEX.66.0F3A 0x71 - invalid */
2603/* Opcode VEX.66.0F3A 0x72 - invalid */
2604/* Opcode VEX.66.0F3A 0x73 - invalid */
2605/* Opcode VEX.66.0F3A 0x74 - invalid */
2606/* Opcode VEX.66.0F3A 0x75 - invalid */
2607/* Opcode VEX.66.0F3A 0x76 - invalid */
2608/* Opcode VEX.66.0F3A 0x77 - invalid */
2609/** Opcode VEX.66.0F3A 0x78 (AMD tables only). */
2610FNIEMOP_STUB(iemOp_vfnmaddps_Vx_Lx_Wx_Hx);
2611/** Opcode VEX.66.0F3A 0x79 (AMD tables only). */
2612FNIEMOP_STUB(iemOp_vfnmaddpd_Vx_Lx_Wx_Hx);
2613/** Opcode VEX.66.0F3A 0x7a (AMD tables only). */
2614FNIEMOP_STUB(iemOp_vfnmaddss_Vx_Lx_Wx_Hx);
2615/** Opcode VEX.66.0F3A 0x7b (AMD tables only). */
2616FNIEMOP_STUB(iemOp_vfnmaddsd_Vx_Lx_Wx_Hx);
2617/** Opcode VEX.66.0F3A 0x7c (AMD tables only). */
2618FNIEMOP_STUB(iemOp_vfnmsubps_Vx_Lx_Wx_Hx);
2619/** Opcode VEX.66.0F3A 0x7d (AMD tables only). */
2620FNIEMOP_STUB(iemOp_vfnmsubpd_Vx_Lx_Wx_Hx);
2621/** Opcode VEX.66.0F3A 0x7e (AMD tables only). */
2622FNIEMOP_STUB(iemOp_vfnmsubss_Vx_Lx_Wx_Hx);
2623/** Opcode VEX.66.0F3A 0x7f (AMD tables only). */
2624FNIEMOP_STUB(iemOp_vfnmsubsd_Vx_Lx_Wx_Hx);
2625
2626/* Opcodes VEX.66.0F3A 0x80 thru 0xbf are unused. */
2627
2628
2629/* Opcode VEX.66.0F3A 0xc0 - invalid */
2630/* Opcode VEX.66.0F3A 0xc1 - invalid */
2631/* Opcode VEX.66.0F3A 0xc2 - invalid */
2632/* Opcode VEX.66.0F3A 0xc3 - invalid */
2633/* Opcode VEX.66.0F3A 0xc4 - invalid */
2634/* Opcode VEX.66.0F3A 0xc5 - invalid */
2635/* Opcode VEX.66.0F3A 0xc6 - invalid */
2636/* Opcode VEX.66.0F3A 0xc7 - invalid */
2637/* Opcode VEX.66.0F3A 0xc8 - invalid */
2638/* Opcode VEX.66.0F3A 0xc9 - invalid */
2639/* Opcode VEX.66.0F3A 0xca - invalid */
2640/* Opcode VEX.66.0F3A 0xcb - invalid */
2641/* Opcode VEX.66.0F3A 0xcc - invalid */
2642/* Opcode VEX.66.0F3A 0xcd - invalid */
2643/* Opcode VEX.66.0F3A 0xce - invalid */
2644/* Opcode VEX.66.0F3A 0xcf - invalid */
2645
2646
2647/* Opcode VEX.66.0F3A 0xd0 - invalid */
2648/* Opcode VEX.66.0F3A 0xd1 - invalid */
2649/* Opcode VEX.66.0F3A 0xd2 - invalid */
2650/* Opcode VEX.66.0F3A 0xd3 - invalid */
2651/* Opcode VEX.66.0F3A 0xd4 - invalid */
2652/* Opcode VEX.66.0F3A 0xd5 - invalid */
2653/* Opcode VEX.66.0F3A 0xd6 - invalid */
2654/* Opcode VEX.66.0F3A 0xd7 - invalid */
2655/* Opcode VEX.66.0F3A 0xd8 - invalid */
2656/* Opcode VEX.66.0F3A 0xd9 - invalid */
2657/* Opcode VEX.66.0F3A 0xda - invalid */
2658/* Opcode VEX.66.0F3A 0xdb - invalid */
2659/* Opcode VEX.66.0F3A 0xdc - invalid */
2660/* Opcode VEX.66.0F3A 0xdd - invalid */
2661/* Opcode VEX.66.0F3A 0xde - invalid */
2662
2663
2664/* Opcode VEX.66.0F3A 0xdf - (aeskeygenassist). */
2665FNIEMOP_DEF(iemOp_vaeskeygen_Vdq_Wdq_Ib)
2666{
2667 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2668 if (IEM_IS_MODRM_REG_MODE(bRm))
2669 {
2670 /*
2671 * Register, register.
2672 */
2673 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
2674 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2675 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX_2(fAvx, fAesNi);
2676 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2677 IEM_MC_PREPARE_AVX_USAGE();
2678 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2679 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2680 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
2681 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2682 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2683 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAesNi, iemAImpl_vaeskeygenassist_u128, iemAImpl_vaeskeygenassist_u128_fallback),
2684 puDst, puSrc, bImmArg);
2685 IEM_MC_ADVANCE_RIP_AND_FINISH();
2686 IEM_MC_END();
2687 }
2688 else
2689 {
2690 /*
2691 * Register, memory.
2692 */
2693 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2694 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2695 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2696 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
2697 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
2698 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX_2(fAvx, fAesNi);
2699
2700 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2701 IEM_MC_PREPARE_AVX_USAGE();
2702 IEM_MC_ARG(PRTUINT128U, puDst, 0);
2703 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2704 IEM_MC_LOCAL(RTUINT128U, uSrc);
2705 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
2706 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2707 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAesNi, iemAImpl_vaeskeygenassist_u128, iemAImpl_vaeskeygenassist_u128_fallback),
2708 puDst, puSrc, bImmArg);
2709 IEM_MC_ADVANCE_RIP_AND_FINISH();
2710 IEM_MC_END();
2711 }
2712}
2713
2714
2715/**
2716 * @opcode 0xf0
2717 * @oppfx 0xf2
2718 * @opflclass unchanged
2719 */
2720FNIEMOP_DEF(iemOp_rorx_Gy_Ey_Ib)
2721{
2722 IEMOP_MNEMONIC3(VEX_RMI, RORX, rorx, Gy, Ey, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO | IEMOPHINT_VEX_V_ZERO);
2723 IEMOP_HLP_IGNORE_VEX_W_PREFIX_IF_NOT_IN_64BIT();
2724 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2725 if (IEM_IS_MODRM_REG_MODE(bRm))
2726 {
2727 /*
2728 * Register, register.
2729 */
2730 uint8_t bImm8; IEM_OPCODE_GET_NEXT_U8(&bImm8);
2731 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2732 {
2733 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2734 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fBmi2);
2735 IEM_MC_ARG(uint64_t *, pDst, 0);
2736 IEM_MC_ARG(uint64_t, uSrc1, 1);
2737 IEM_MC_ARG_CONST(uint64_t, uSrc2, bImm8, 2);
2738 IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm));
2739 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2740 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_rorx_u64, pDst, uSrc1, uSrc2);
2741 IEM_MC_ADVANCE_RIP_AND_FINISH();
2742 IEM_MC_END();
2743 }
2744 else
2745 {
2746 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2747 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fBmi2);
2748 IEM_MC_ARG(uint32_t *, pDst, 0);
2749 IEM_MC_ARG(uint32_t, uSrc1, 1);
2750 IEM_MC_ARG_CONST(uint32_t, uSrc2, bImm8, 2);
2751 IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm));
2752 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2753 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_rorx_u32, pDst, uSrc1, uSrc2);
2754 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
2755 IEM_MC_ADVANCE_RIP_AND_FINISH();
2756 IEM_MC_END();
2757 }
2758 }
2759 else
2760 {
2761 /*
2762 * Register, memory.
2763 */
2764 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2765 {
2766 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2767 IEM_MC_ARG(uint64_t *, pDst, 0);
2768 IEM_MC_ARG(uint64_t, uSrc1, 1);
2769 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2770 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2771 uint8_t bImm8; IEM_OPCODE_GET_NEXT_U8(&bImm8);
2772 IEM_MC_ARG_CONST(uint64_t, uSrc2, bImm8, 2);
2773 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fBmi2);
2774 IEM_MC_FETCH_MEM_U64(uSrc1, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2775 IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2776 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_rorx_u64, pDst, uSrc1, uSrc2);
2777 IEM_MC_ADVANCE_RIP_AND_FINISH();
2778 IEM_MC_END();
2779 }
2780 else
2781 {
2782 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2783 IEM_MC_ARG(uint32_t *, pDst, 0);
2784 IEM_MC_ARG(uint32_t, uSrc1, 1);
2785 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2786 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
2787 uint8_t bImm8; IEM_OPCODE_GET_NEXT_U8(&bImm8);
2788 IEM_MC_ARG_CONST(uint32_t, uSrc2, bImm8, 2);
2789 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fBmi2);
2790 IEM_MC_FETCH_MEM_U32(uSrc1, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2791 IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
2792 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_rorx_u32, pDst, uSrc1, uSrc2);
2793 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm));
2794 IEM_MC_ADVANCE_RIP_AND_FINISH();
2795 IEM_MC_END();
2796 }
2797 }
2798}
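
/*
 * A minimal standalone sketch of the RORX semantics decoded above (per the
 * SDM): a plain rotate right by the immediate that, unlike ROR, leaves all
 * flags untouched.  The 32-bit form masks the count with 31 and zero-extends
 * the result into the full 64-bit destination.
 */
static uint64_t rorx64Sketch(uint64_t uSrc, uint8_t bImm)
{
    unsigned const cShift = bImm & 63;
    return cShift ? (uSrc >> cShift) | (uSrc << (64 - cShift)) : uSrc;
}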
2799
2800
2801/**
2802 * VEX opcode map \#3.
2803 *
2804 * @sa g_apfnThreeByte0f3a
2805 */
2806const PFNIEMOP g_apfnVexMap3[] =
2807{
2808 /* no prefix, 066h prefix f3h prefix, f2h prefix */
2809 /* 0x00 */ iemOp_InvalidNeedRMImm8, iemOp_vpermq_Vqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2810 /* 0x01 */ iemOp_InvalidNeedRMImm8, iemOp_vpermpd_Vqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2811 /* 0x02 */ iemOp_InvalidNeedRMImm8, iemOp_vpblendd_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2812 /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2813 /* 0x04 */ iemOp_InvalidNeedRMImm8, iemOp_vpermilps_Vx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2814 /* 0x05 */ iemOp_InvalidNeedRMImm8, iemOp_vpermilpd_Vx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2815 /* 0x06 */ iemOp_InvalidNeedRMImm8, iemOp_vperm2f128_Vqq_Hqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2816 /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2817 /* 0x08 */ iemOp_InvalidNeedRMImm8, iemOp_vroundps_Vx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2818 /* 0x09 */ iemOp_InvalidNeedRMImm8, iemOp_vroundpd_Vx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2819 /* 0x0a */ iemOp_InvalidNeedRMImm8, iemOp_vroundss_Vss_Wss_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2820 /* 0x0b */ iemOp_InvalidNeedRMImm8, iemOp_vroundsd_Vsd_Wsd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2821 /* 0x0c */ iemOp_InvalidNeedRMImm8, iemOp_vblendps_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2822 /* 0x0d */ iemOp_InvalidNeedRMImm8, iemOp_vblendpd_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2823 /* 0x0e */ iemOp_InvalidNeedRMImm8, iemOp_vpblendw_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2824 /* 0x0f */ iemOp_InvalidNeedRMImm8, iemOp_vpalignr_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2825
2826 /* 0x10 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2827 /* 0x11 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2828 /* 0x12 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2829 /* 0x13 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2830 /* 0x14 */ iemOp_InvalidNeedRMImm8, iemOp_vpextrb_Eb_Vdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2831 /* 0x15 */ iemOp_InvalidNeedRMImm8, iemOp_vpextrw_Ew_Vdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2832 /* 0x16 */ iemOp_InvalidNeedRMImm8, iemOp_vpextrd_q_Ey_Vdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2833 /* 0x17 */ iemOp_InvalidNeedRMImm8, iemOp_vextractps_Ed_Vdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2834 /* 0x18 */ iemOp_InvalidNeedRMImm8, iemOp_vinsertf128_Vqq_Hqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2835 /* 0x19 */ iemOp_InvalidNeedRMImm8, iemOp_vextractf128_Wdq_Vqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2836 /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2837 /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2838 /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2839 /* 0x1d */ iemOp_InvalidNeedRMImm8, iemOp_vcvtps2ph_Wx_Vx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2840 /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2841 /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2842
2843 /* 0x20 */ iemOp_InvalidNeedRMImm8, iemOp_vpinsrb_Vdq_Hdq_RyMb_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2844 /* 0x21 */ iemOp_InvalidNeedRMImm8, iemOp_vinsertps_Vdq_Hdq_UdqMd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2845 /* 0x22 */ iemOp_InvalidNeedRMImm8, iemOp_vpinsrd_q_Vdq_Hdq_Ey_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2846 /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2847 /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2848 /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2849 /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2850 /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2851 /* 0x28 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2852 /* 0x29 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2853 /* 0x2a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2854 /* 0x2b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2855 /* 0x2c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2856 /* 0x2d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2857 /* 0x2e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2858 /* 0x2f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2859
2860 /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2861 /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2862 /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2863 /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2864 /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2865 /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2866 /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2867 /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2868 /* 0x38 */ iemOp_InvalidNeedRMImm8, iemOp_vinserti128_Vqq_Hqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2869 /* 0x39 */ iemOp_InvalidNeedRMImm8, iemOp_vextracti128_Wdq_Vqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2870 /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2871 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2872 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2873 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2874 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2875 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2876
2877 /* 0x40 */ iemOp_InvalidNeedRMImm8, iemOp_vdpps_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2878 /* 0x41 */ iemOp_InvalidNeedRMImm8, iemOp_vdppd_Vdq_Hdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2879 /* 0x42 */ iemOp_InvalidNeedRMImm8, iemOp_vmpsadbw_Vx_Hx_Wx_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2880 /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2881 /* 0x44 */ iemOp_InvalidNeedRMImm8, iemOp_vpclmulqdq_Vdq_Hdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2882 /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2883 /* 0x46 */ iemOp_InvalidNeedRMImm8, iemOp_vperm2i128_Vqq_Hqq_Wqq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2884 /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2885 /* 0x48 */ iemOp_InvalidNeedRMImm8, iemOp_vpermil2ps_Vx_Hx_Wp_Lx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2886 /* 0x49 */ iemOp_InvalidNeedRMImm8, iemOp_vpermil2pd_Vx_Hx_Wp_Lx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2887 /* 0x4a */ iemOp_InvalidNeedRMImm8, iemOp_vblendvps_Vx_Hx_Wx_Lx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2888 /* 0x4b */ iemOp_InvalidNeedRMImm8, iemOp_vblendvpd_Vx_Hx_Wx_Lx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2889 /* 0x4c */ iemOp_InvalidNeedRMImm8, iemOp_vpblendvb_Vx_Hx_Wx_Lx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
2890 /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2891 /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2892 /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2893
2894 /* 0x50 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2895 /* 0x51 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2896 /* 0x52 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2897 /* 0x53 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2898 /* 0x54 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2899 /* 0x55 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2900 /* 0x56 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2901 /* 0x57 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2902 /* 0x58 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2903 /* 0x59 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2904 /* 0x5a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2905 /* 0x5b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
2906 /* 0x5c */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddsubps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x5d */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddsubpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x5e */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubaddps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x5f */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubaddpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,

/* 0x60 */ iemOp_InvalidNeedRMImm8, iemOp_vpcmpestrm_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x61 */ iemOp_InvalidNeedRMImm8, iemOp_vpcmpestri_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x62 */ iemOp_InvalidNeedRMImm8, iemOp_vpcmpistrm_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x63 */ iemOp_InvalidNeedRMImm8, iemOp_vpcmpistri_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x64 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x65 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x66 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x67 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x68 */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x69 */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x6a */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddss_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x6b */ iemOp_InvalidNeedRMImm8, iemOp_vfmaddsd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x6c */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x6d */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x6e */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubss_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x6f */ iemOp_InvalidNeedRMImm8, iemOp_vfmsubsd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,

/* 0x70 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x71 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x72 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x73 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x74 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x75 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x76 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x77 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x78 */ iemOp_InvalidNeedRMImm8, iemOp_vfnmaddps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x79 */ iemOp_InvalidNeedRMImm8, iemOp_vfnmaddpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x7a */ iemOp_InvalidNeedRMImm8, iemOp_vfnmaddss_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x7b */ iemOp_InvalidNeedRMImm8, iemOp_vfnmaddsd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x7c */ iemOp_InvalidNeedRMImm8, iemOp_vfnmsubps_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x7d */ iemOp_InvalidNeedRMImm8, iemOp_vfnmsubpd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x7e */ iemOp_InvalidNeedRMImm8, iemOp_vfnmsubss_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
/* 0x7f */ iemOp_InvalidNeedRMImm8, iemOp_vfnmsubsd_Vx_Lx_Wx_Hx, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,

/* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),

/* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),

/* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xab */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xac */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xad */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xae */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRMImm8),

/* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xba */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRMImm8),

/* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xc2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xc4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xc5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xc6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xca */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xce */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRMImm8),

/* 0xd0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xd1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xd2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xd3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xd4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xd5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xd6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xd7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xd8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xd9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xda */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xdb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xdc */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xdd */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xde */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xdf */ iemOp_InvalidNeedRMImm8, iemOp_vaeskeygenassist_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,

/* 0xe0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xe1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xe2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xe3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xe4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xe5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xe6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xe7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xe8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xe9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xea */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xeb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xec */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xed */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xee */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xef */ IEMOP_X4(iemOp_InvalidNeedRMImm8),

/* 0xf0 */ iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8, iemOp_rorx_Gy_Ey_Ib,
/* 0xf1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xf2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xf3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xf4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xf5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xf6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xf7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xf8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xf9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xfa */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xfb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xfc */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xfd */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xfe */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
/* 0xff */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
};
AssertCompile(RT_ELEMENTS(g_apfnVexMap3) == 1024);
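
/*
 * Illustrative sketch only (not part of the original file): how a table laid
 * out like g_apfnVexMap3 is typically consumed.  Each opcode byte owns four
 * consecutive slots, one per SIMD prefix column (0=none, 1=0x66, 2=0xF3,
 * 3=0xF2), which is why the AssertCompile above pins the size at 256*4=1024.
 * The exact decoder plumbing is assumed here (the field name idxPrefix and
 * its derivation from the VEX 'pp' bits are taken on faith):
 *
 *     uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
 *     // Fold the opcode byte and the prefix column into a single index;
 *     // the 1024-entry AssertCompile guarantees this stays in bounds for
 *     // every opcode/prefix combination, so no runtime range check needed.
 *     return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
 */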

/** @} */
