1/* $Id: IEMAllInstructionsVexMap1.cpp.h 97356 2022-10-31 22:36:29Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 *
5 * @remarks IEMAllInstructionsTwoByte0f.cpp.h is a legacy mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2022 Oracle and/or its affiliates.
11 *
12 * This file is part of VirtualBox base platform packages, as
13 * available from https://www.virtualbox.org.
14 *
15 * This program is free software; you can redistribute it and/or
16 * modify it under the terms of the GNU General Public License
17 * as published by the Free Software Foundation, in version 3 of the
18 * License.
19 *
20 * This program is distributed in the hope that it will be useful, but
21 * WITHOUT ANY WARRANTY; without even the implied warranty of
22 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
23 * General Public License for more details.
24 *
25 * You should have received a copy of the GNU General Public License
26 * along with this program; if not, see <https://www.gnu.org/licenses>.
27 *
28 * SPDX-License-Identifier: GPL-3.0-only
29 */
30
31
32/** @name VEX Opcode Map 1
33 * @{
34 */
35
36/**
37 * Common worker for AVX2 instructions on the forms:
38 * - vpxxx xmm0, xmm1, xmm2/mem128
39 * - vpxxx ymm0, ymm1, ymm2/mem256
40 *
41 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
42 */
43FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, PCIEMOPMEDIAF3, pImpl)
44{
45 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
46 if (IEM_IS_MODRM_REG_MODE(bRm))
47 {
48 /*
49 * Register, register.
50 */
51 if (pVCpu->iem.s.uVexLength)
52 {
53 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
54 IEM_MC_BEGIN(4, 3);
55 IEM_MC_LOCAL(RTUINT256U, uDst);
56 IEM_MC_LOCAL(RTUINT256U, uSrc1);
57 IEM_MC_LOCAL(RTUINT256U, uSrc2);
58 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
59 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 1);
60 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 2);
61 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 3);
62 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
63 IEM_MC_PREPARE_AVX_USAGE();
64 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
65 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
66 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
67 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
68 IEM_MC_ADVANCE_RIP_AND_FINISH();
69 IEM_MC_END();
70 }
71 else
72 {
73 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
74 IEM_MC_BEGIN(4, 0);
75 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
76 IEM_MC_ARG(PRTUINT128U, puDst, 1);
77 IEM_MC_ARG(PCRTUINT128U, puSrc1, 2);
78 IEM_MC_ARG(PCRTUINT128U, puSrc2, 3);
79 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
80 IEM_MC_PREPARE_AVX_USAGE();
81 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
82 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
83 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
84 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
85 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
86 IEM_MC_ADVANCE_RIP_AND_FINISH();
87 IEM_MC_END();
88 }
89 }
90 else
91 {
92 /*
93 * Register, memory.
94 */
95 if (pVCpu->iem.s.uVexLength)
96 {
97 IEM_MC_BEGIN(4, 4);
98 IEM_MC_LOCAL(RTUINT256U, uDst);
99 IEM_MC_LOCAL(RTUINT256U, uSrc1);
100 IEM_MC_LOCAL(RTUINT256U, uSrc2);
101 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
102 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
103 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 1);
104 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 2);
105 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 3);
106
107 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
108 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
109 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
110 IEM_MC_PREPARE_AVX_USAGE();
111
112 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
113 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
114 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
115 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
116
117 IEM_MC_ADVANCE_RIP_AND_FINISH();
118 IEM_MC_END();
119 }
120 else
121 {
122 IEM_MC_BEGIN(4, 2);
123 IEM_MC_LOCAL(RTUINT128U, uSrc2);
124 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
125 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
126 IEM_MC_ARG(PRTUINT128U, puDst, 1);
127 IEM_MC_ARG(PCRTUINT128U, puSrc1, 2);
128 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 3);
129
130 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
131 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
132 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
133 IEM_MC_PREPARE_AVX_USAGE();
134
135 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
136 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
137 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
138 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
139 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
140
141 IEM_MC_ADVANCE_RIP_AND_FINISH();
142 IEM_MC_END();
143 }
144 }
145 return VINF_SUCCESS;
146}
147
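/*
 * Illustrative sketch, not part of the original file: a typical VEX map 1
 * opcode handler dispatches to the worker above through a host/fallback
 * function table.  The vpand handler below is modelled from memory on the
 * file's conventions and is an assumption, not a verbatim copy:
 *
 *  FNIEMOP_DEF(iemOp_vpand_Vx_Hx_Wx)
 *  {
 *      IEMOP_MNEMONIC3(VEX_RVM, VPAND, vpand, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
 *      IEMOPMEDIAF3_INIT_VARS(vpand);
 *      return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
 *                            IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
 *  }
 */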
148
149/**
150 * Common worker for AVX2 instructions on the forms:
151 * - vpxxx xmm0, xmm1, xmm2/mem128
152 * - vpxxx ymm0, ymm1, ymm2/mem256
153 *
154 * Takes function table for function w/o implicit state parameter.
155 *
156 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
157 */
158FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, PCIEMOPMEDIAOPTF3, pImpl)
159{
160 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
161 if (IEM_IS_MODRM_REG_MODE(bRm))
162 {
163 /*
164 * Register, register.
165 */
166 if (pVCpu->iem.s.uVexLength)
167 {
168 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
169 IEM_MC_BEGIN(3, 3);
170 IEM_MC_LOCAL(RTUINT256U, uDst);
171 IEM_MC_LOCAL(RTUINT256U, uSrc1);
172 IEM_MC_LOCAL(RTUINT256U, uSrc2);
173 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
174 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
175 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
176 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
177 IEM_MC_PREPARE_AVX_USAGE();
178 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
179 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
180 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
181 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
182 IEM_MC_ADVANCE_RIP_AND_FINISH();
183 IEM_MC_END();
184 }
185 else
186 {
187 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
188 IEM_MC_BEGIN(3, 0);
189 IEM_MC_ARG(PRTUINT128U, puDst, 0);
190 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
191 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
192 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
193 IEM_MC_PREPARE_AVX_USAGE();
194 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
195 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
196 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
197 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
198 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
199 IEM_MC_ADVANCE_RIP_AND_FINISH();
200 IEM_MC_END();
201 }
202 }
203 else
204 {
205 /*
206 * Register, memory.
207 */
208 if (pVCpu->iem.s.uVexLength)
209 {
210 IEM_MC_BEGIN(3, 4);
211 IEM_MC_LOCAL(RTUINT256U, uDst);
212 IEM_MC_LOCAL(RTUINT256U, uSrc1);
213 IEM_MC_LOCAL(RTUINT256U, uSrc2);
214 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
215 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
216 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
217 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
218
219 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
220 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
221 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
222 IEM_MC_PREPARE_AVX_USAGE();
223
224 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
225 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
226 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
227 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
228
229 IEM_MC_ADVANCE_RIP_AND_FINISH();
230 IEM_MC_END();
231 }
232 else
233 {
234 IEM_MC_BEGIN(3, 2);
235 IEM_MC_LOCAL(RTUINT128U, uSrc2);
236 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
237 IEM_MC_ARG(PRTUINT128U, puDst, 0);
238 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
239 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
240
241 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
242 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
243 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
244 IEM_MC_PREPARE_AVX_USAGE();
245
246 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
247 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
248 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
249 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
250 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
251
252 IEM_MC_ADVANCE_RIP_AND_FINISH();
253 IEM_MC_END();
254 }
255 }
256 return VINF_SUCCESS;
257}
258
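/*
 * Sketch of the table type the worker above consumes, reconstructed from
 * usage rather than copied from the IEM headers (treat the typedef and
 * field names as assumptions): IEMOPMEDIAOPTF3 holds one helper per vector
 * width, and - unlike the IEMOPMEDIAF3 used by the previous worker - the
 * helpers take no implicit state argument, hence IEM_MC_CALL_VOID_AIMPL_3
 * here instead of IEM_MC_CALL_AVX_AIMPL_3:
 *
 *  typedef struct IEMOPMEDIAOPTF3
 *  {
 *      PFNIEMAIMPLMEDIAOPTF3U128 pfnU128;  // void fn(PRTUINT128U, PCRTUINT128U, PCRTUINT128U)
 *      PFNIEMAIMPLMEDIAOPTF3U256 pfnU256;  // void fn(PRTUINT256U, PCRTUINT256U, PCRTUINT256U)
 *  } IEMOPMEDIAOPTF3;
 */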
259
260/**
261 * Common worker for AVX2 instructions on the forms:
262 * - vpunpckhxx xmm0, xmm1, xmm2/mem128
263 * - vpunpckhxx ymm0, ymm1, ymm2/mem256
264 *
265 * The 128-bit memory version of this instruction may elect to skip fetching the
266 * lower 64 bits of the operand. We, however, do not.
267 *
268 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
269 */
270FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, PCIEMOPMEDIAOPTF3, pImpl)
271{
272 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, pImpl);
273}
274
275
276/**
277 * Common worker for AVX2 instructions on the forms:
278 * - vpunpcklxx xmm0, xmm1, xmm2/mem128
279 * - vpunpcklxx ymm0, ymm1, ymm2/mem256
280 *
281 * The 128-bit memory version of this instruction may elect to skip fetching the
282 * higher 64 bits of the operand. We, however, do not.
283 *
284 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
285 */
286FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, PCIEMOPMEDIAOPTF3, pImpl)
287{
288 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, pImpl);
289}
290
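/*
 * Worked example of the unpack data flow (illustration only): for
 * vpunpcklbw xmm0, xmm1, xmm2 with xmm1 = {a15..a0} and xmm2 = {b15..b0},
 * the result is xmm0 = {b7,a7, b6,a6, ..., b0,a0} - only the low eight
 * bytes of each source are consumed, which is why a 128-bit memory
 * implementation could legally skip fetching the operand's high half.
 * The high-source variant above is the mirror image, consuming only the
 * high eight bytes.
 */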
291
292/**
293 * Common worker for AVX2 instructions on the forms:
294 * - vpxxx xmm0, xmm1/mem128
295 * - vpxxx ymm0, ymm1/mem256
296 *
297 * Takes function table for function w/o implicit state parameter.
298 *
299 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
300 */
301FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Wx_Opt, PCIEMOPMEDIAOPTF2, pImpl)
302{
303 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
304 if (IEM_IS_MODRM_REG_MODE(bRm))
305 {
306 /*
307 * Register, register.
308 */
309 if (pVCpu->iem.s.uVexLength)
310 {
311 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
312 IEM_MC_BEGIN(2, 2);
313 IEM_MC_LOCAL(RTUINT256U, uDst);
314 IEM_MC_LOCAL(RTUINT256U, uSrc);
315 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
316 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
317 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
318 IEM_MC_PREPARE_AVX_USAGE();
319 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
320 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
321 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
322 IEM_MC_ADVANCE_RIP_AND_FINISH();
323 IEM_MC_END();
324 }
325 else
326 {
327 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
328 IEM_MC_BEGIN(2, 0);
329 IEM_MC_ARG(PRTUINT128U, puDst, 0);
330 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
331 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
332 IEM_MC_PREPARE_AVX_USAGE();
333 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
334 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
335 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
336 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
337 IEM_MC_ADVANCE_RIP_AND_FINISH();
338 IEM_MC_END();
339 }
340 }
341 else
342 {
343 /*
344 * Register, memory.
345 */
346 if (pVCpu->iem.s.uVexLength)
347 {
348 IEM_MC_BEGIN(2, 3);
349 IEM_MC_LOCAL(RTUINT256U, uDst);
350 IEM_MC_LOCAL(RTUINT256U, uSrc);
351 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
352 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
353 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
354
355 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
356 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
357 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
358 IEM_MC_PREPARE_AVX_USAGE();
359
360 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
361 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
362 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
363
364 IEM_MC_ADVANCE_RIP_AND_FINISH();
365 IEM_MC_END();
366 }
367 else
368 {
369 IEM_MC_BEGIN(2, 2);
370 IEM_MC_LOCAL(RTUINT128U, uSrc);
371 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
372 IEM_MC_ARG(PRTUINT128U, puDst, 0);
373 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
374
375 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
376 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
377 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
378 IEM_MC_PREPARE_AVX_USAGE();
379
380 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
381 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
382 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
383 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
384
385 IEM_MC_ADVANCE_RIP_AND_FINISH();
386 IEM_MC_END();
387 }
388 }
389 return VINF_SUCCESS;
390}
391
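/*
 * Sketch only: a two-operand user of this worker would mirror the
 * three-operand dispatchers elsewhere in the file.  The mnemonic and
 * table-init macro below are hypothetical placeholders for illustration:
 *
 *  FNIEMOP_DEF(iemOp_vpfoo_Vx_Wx)
 *  {
 *      IEMOP_MNEMONIC2(VEX_RM, VPFOO, vpfoo, Vx, Wx, DISOPTYPE_HARMLESS, 0);
 *      IEMOPMEDIAOPTF2_INIT_VARS(vpfoo);
 *      return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx_Opt,
 *                            IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
 *  }
 */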
392
393/* Opcode VEX.0F 0x00 - invalid */
394/* Opcode VEX.0F 0x01 - invalid */
395/* Opcode VEX.0F 0x02 - invalid */
396/* Opcode VEX.0F 0x03 - invalid */
397/* Opcode VEX.0F 0x04 - invalid */
398/* Opcode VEX.0F 0x05 - invalid */
399/* Opcode VEX.0F 0x06 - invalid */
400/* Opcode VEX.0F 0x07 - invalid */
401/* Opcode VEX.0F 0x08 - invalid */
402/* Opcode VEX.0F 0x09 - invalid */
403/* Opcode VEX.0F 0x0a - invalid */
404
405/** Opcode VEX.0F 0x0b. */
406FNIEMOP_DEF(iemOp_vud2)
407{
408 IEMOP_MNEMONIC(vud2, "vud2");
409 return IEMOP_RAISE_INVALID_OPCODE();
410}
411
412/* Opcode VEX.0F 0x0c - invalid */
413/* Opcode VEX.0F 0x0d - invalid */
414/* Opcode VEX.0F 0x0e - invalid */
415/* Opcode VEX.0F 0x0f - invalid */
416
417
418/**
419 * @opcode 0x10
420 * @oppfx none
421 * @opcpuid avx
422 * @opgroup og_avx_simdfp_datamove
423 * @opxcpttype 4UA
424 * @optest op1=1 op2=2 -> op1=2
425 * @optest op1=0 op2=-22 -> op1=-22
426 */
427FNIEMOP_DEF(iemOp_vmovups_Vps_Wps)
428{
429 IEMOP_MNEMONIC2(VEX_RM, VMOVUPS, vmovups, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
430 Assert(pVCpu->iem.s.uVexLength <= 1);
431 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
432 if (IEM_IS_MODRM_REG_MODE(bRm))
433 {
434 /*
435 * Register, register.
436 */
437 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
438 IEM_MC_BEGIN(0, 0);
439 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
440 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
441 if (pVCpu->iem.s.uVexLength == 0)
442 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
443 IEM_GET_MODRM_RM(pVCpu, bRm));
444 else
445 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
446 IEM_GET_MODRM_RM(pVCpu, bRm));
447 IEM_MC_ADVANCE_RIP_AND_FINISH();
448 IEM_MC_END();
449 }
450 else if (pVCpu->iem.s.uVexLength == 0)
451 {
452 /*
453 * 128-bit: Register, Memory
454 */
455 IEM_MC_BEGIN(0, 2);
456 IEM_MC_LOCAL(RTUINT128U, uSrc);
457 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
458
459 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
460 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
461 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
462 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
463
464 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
465 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
466
467 IEM_MC_ADVANCE_RIP_AND_FINISH();
468 IEM_MC_END();
469 }
470 else
471 {
472 /*
473 * 256-bit: Register, Memory
474 */
475 IEM_MC_BEGIN(0, 2);
476 IEM_MC_LOCAL(RTUINT256U, uSrc);
477 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
478
479 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
480 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
481 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
482 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
483
484 IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
485 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
486
487 IEM_MC_ADVANCE_RIP_AND_FINISH();
488 IEM_MC_END();
489 }
490 return VINF_SUCCESS;
491}
492
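/*
 * Worked example of the _ZX_VLMAX semantics used above (illustration):
 * with VEX.L=0, 'vmovups xmm1, xmm2' copies 128 bits and zeroes
 * ymm1[255:128]; with VEX.L=1 the full 256 bits are copied.  Zeroing the
 * upper lanes on 128-bit VEX operations is what distinguishes them from
 * legacy SSE moves, which leave ymm[255:128] untouched.
 */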
493
494/**
495 * @opcode 0x10
496 * @oppfx 0x66
497 * @opcpuid avx
498 * @opgroup og_avx_simdfp_datamove
499 * @opxcpttype 4UA
500 * @optest op1=1 op2=2 -> op1=2
501 * @optest op1=0 op2=-22 -> op1=-22
502 */
503FNIEMOP_DEF(iemOp_vmovupd_Vpd_Wpd)
504{
505 IEMOP_MNEMONIC2(VEX_RM, VMOVUPD, vmovupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
506 Assert(pVCpu->iem.s.uVexLength <= 1);
507 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
508 if (IEM_IS_MODRM_REG_MODE(bRm))
509 {
510 /*
511 * Register, register.
512 */
513 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
514 IEM_MC_BEGIN(0, 0);
515 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
516 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
517 if (pVCpu->iem.s.uVexLength == 0)
518 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
519 IEM_GET_MODRM_RM(pVCpu, bRm));
520 else
521 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
522 IEM_GET_MODRM_RM(pVCpu, bRm));
523 IEM_MC_ADVANCE_RIP_AND_FINISH();
524 IEM_MC_END();
525 }
526 else if (pVCpu->iem.s.uVexLength == 0)
527 {
528 /*
529 * 128-bit: Memory, register.
530 */
531 IEM_MC_BEGIN(0, 2);
532 IEM_MC_LOCAL(RTUINT128U, uSrc);
533 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
534
535 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
536 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
537 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
538 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
539
540 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
541 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
542
543 IEM_MC_ADVANCE_RIP_AND_FINISH();
544 IEM_MC_END();
545 }
546 else
547 {
548 /*
549 * 256-bit: Memory, register.
550 */
551 IEM_MC_BEGIN(0, 2);
552 IEM_MC_LOCAL(RTUINT256U, uSrc);
553 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
554
555 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
556 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
557 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
558 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
559
560 IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
561 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
562
563 IEM_MC_ADVANCE_RIP_AND_FINISH();
564 IEM_MC_END();
565 }
566 return VINF_SUCCESS;
567}
568
569
570FNIEMOP_DEF(iemOp_vmovss_Vss_Hss_Wss)
571{
572 Assert(pVCpu->iem.s.uVexLength <= 1);
573 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
574 if (IEM_IS_MODRM_REG_MODE(bRm))
575 {
576 /**
577 * @opcode 0x10
578 * @oppfx 0xf3
579 * @opcodesub 11 mr/reg
580 * @opcpuid avx
581 * @opgroup og_avx_simdfp_datamerge
582 * @opxcpttype 5
583 * @optest op1=1 op2=0 op3=2 -> op1=2
584 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffea
585 * @optest op1=3 op2=-1 op3=0x77 -> op1=-4294967177
586 * @optest op1=3 op2=-2 op3=0x77 -> op1=-8589934473
587 * @note HssHi refers to bits 127:32.
588 */
589 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSS, vmovss, Vss_WO, HssHi, Uss, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
590 IEMOP_HLP_DONE_VEX_DECODING();
591 IEM_MC_BEGIN(0, 0);
592
593 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
594 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
595 IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
596 IEM_GET_MODRM_RM(pVCpu, bRm) /*U32*/,
597 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
598 IEM_MC_ADVANCE_RIP_AND_FINISH();
599 IEM_MC_END();
600 }
601 else
602 {
603 /**
604 * @opdone
605 * @opcode 0x10
606 * @oppfx 0xf3
607 * @opcodesub !11 mr/reg
608 * @opcpuid avx
609 * @opgroup og_avx_simdfp_datamove
610 * @opxcpttype 5
611 * @opfunction iemOp_vmovss_Vss_Hss_Wss
612 * @optest op1=1 op2=2 -> op1=2
613 * @optest op1=0 op2=-22 -> op1=-22
614 */
615 IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSS, vmovss, VssZx_WO, Md, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
616 IEM_MC_BEGIN(0, 2);
617 IEM_MC_LOCAL(uint32_t, uSrc);
618 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
619
620 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
621 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
622 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
623 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
624
625 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
626 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
627
628 IEM_MC_ADVANCE_RIP_AND_FINISH();
629 IEM_MC_END();
630 }
631
632 return VINF_SUCCESS;
633}
634
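/*
 * Decoding the register-form @optest above (illustration): op1=3 op2=-1
 * op3=0x77 sets Hss (VVVV) to all ones and Uss (r/m) to 0x77; the merge
 * keeps bits 31:0 from Uss and bits 127:32 from Hss, so the destination
 * becomes 0xffff..ffff00000077, which prints as -4294967177 when taken as
 * a signed value.
 */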
635
636FNIEMOP_DEF(iemOp_vmovsd_Vsd_Hsd_Wsd)
637{
638 Assert(pVCpu->iem.s.uVexLength <= 1);
639 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
640 if (IEM_IS_MODRM_REG_MODE(bRm))
641 {
642 /**
643 * @opcode 0x10
644 * @oppfx 0xf2
645 * @opcodesub 11 mr/reg
646 * @opcpuid avx
647 * @opgroup og_avx_simdfp_datamerge
648 * @opxcpttype 5
649 * @optest op1=1 op2=0 op3=2 -> op1=2
650 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
651 * @optest op1=3 op2=-1 op3=0x77 ->
652 * op1=0xffffffffffffffff0000000000000077
653 * @optest op1=3 op2=0x42 op3=0x77 -> op1=0x420000000000000077
654 */
655 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSD, vmovsd, Vsd_WO, HsdHi, Usd, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
656 IEMOP_HLP_DONE_VEX_DECODING();
657 IEM_MC_BEGIN(0, 0);
658
659 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
660 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
661 IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
662 IEM_GET_MODRM_RM(pVCpu, bRm) /*U64*/,
663 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
664 IEM_MC_ADVANCE_RIP_AND_FINISH();
665 IEM_MC_END();
666 }
667 else
668 {
669 /**
670 * @opdone
671 * @opcode 0x10
672 * @oppfx 0xf2
673 * @opcodesub !11 mr/reg
674 * @opcpuid avx
675 * @opgroup og_avx_simdfp_datamove
676 * @opxcpttype 5
677 * @opfunction iemOp_vmovsd_Vsd_Hsd_Wsd
678 * @optest op1=1 op2=2 -> op1=2
679 * @optest op1=0 op2=-22 -> op1=-22
680 */
681 IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSD, vmovsd, VsdZx_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
682 IEM_MC_BEGIN(0, 2);
683 IEM_MC_LOCAL(uint64_t, uSrc);
684 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
685
686 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
687 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
688 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
689 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
690
691 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
692 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
693
694 IEM_MC_ADVANCE_RIP_AND_FINISH();
695 IEM_MC_END();
696 }
697
698 return VINF_SUCCESS;
699}
700
701
702/**
703 * @opcode 0x11
704 * @oppfx none
705 * @opcpuid avx
706 * @opgroup og_avx_simdfp_datamove
707 * @opxcpttype 4UA
708 * @optest op1=1 op2=2 -> op1=2
709 * @optest op1=0 op2=-22 -> op1=-22
710 */
711FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
712{
713 IEMOP_MNEMONIC2(VEX_MR, VMOVUPS, vmovups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
714 Assert(pVCpu->iem.s.uVexLength <= 1);
715 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
716 if (IEM_IS_MODRM_REG_MODE(bRm))
717 {
718 /*
719 * Register, register.
720 */
721 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
722 IEM_MC_BEGIN(0, 0);
723 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
724 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
725 if (pVCpu->iem.s.uVexLength == 0)
726 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
727 IEM_GET_MODRM_REG(pVCpu, bRm));
728 else
729 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
730 IEM_GET_MODRM_REG(pVCpu, bRm));
731 IEM_MC_ADVANCE_RIP_AND_FINISH();
732 IEM_MC_END();
733 }
734 else if (pVCpu->iem.s.uVexLength == 0)
735 {
736 /*
737 * 128-bit: Memory, register.
738 */
739 IEM_MC_BEGIN(0, 2);
740 IEM_MC_LOCAL(RTUINT128U, uSrc);
741 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
742
743 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
744 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
745 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
746 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
747
748 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
749 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
750
751 IEM_MC_ADVANCE_RIP_AND_FINISH();
752 IEM_MC_END();
753 }
754 else
755 {
756 /*
757 * 256-bit: Memory, register.
758 */
759 IEM_MC_BEGIN(0, 2);
760 IEM_MC_LOCAL(RTUINT256U, uSrc);
761 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
762
763 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
764 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
765 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
766 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
767
768 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
769 IEM_MC_STORE_MEM_U256(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
770
771 IEM_MC_ADVANCE_RIP_AND_FINISH();
772 IEM_MC_END();
773 }
774 return VINF_SUCCESS;
775}
776
777
778/**
779 * @opcode 0x11
780 * @oppfx 0x66
781 * @opcpuid avx
782 * @opgroup og_avx_simdfp_datamove
783 * @opxcpttype 4UA
784 * @optest op1=1 op2=2 -> op1=2
785 * @optest op1=0 op2=-22 -> op1=-22
786 */
787FNIEMOP_DEF(iemOp_vmovupd_Wpd_Vpd)
788{
789 IEMOP_MNEMONIC2(VEX_MR, VMOVUPD, vmovupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
790 Assert(pVCpu->iem.s.uVexLength <= 1);
791 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
792 if (IEM_IS_MODRM_REG_MODE(bRm))
793 {
794 /*
795 * Register, register.
796 */
797 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
798 IEM_MC_BEGIN(0, 0);
799 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
800 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
801 if (pVCpu->iem.s.uVexLength == 0)
802 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
803 IEM_GET_MODRM_REG(pVCpu, bRm));
804 else
805 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
806 IEM_GET_MODRM_REG(pVCpu, bRm));
807 IEM_MC_ADVANCE_RIP_AND_FINISH();
808 IEM_MC_END();
809 }
810 else if (pVCpu->iem.s.uVexLength == 0)
811 {
812 /*
813 * 128-bit: Memory, register.
814 */
815 IEM_MC_BEGIN(0, 2);
816 IEM_MC_LOCAL(RTUINT128U, uSrc);
817 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
818
819 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
820 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
821 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
822 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
823
824 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
825 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
826
827 IEM_MC_ADVANCE_RIP_AND_FINISH();
828 IEM_MC_END();
829 }
830 else
831 {
832 /*
833 * 256-bit: Memory, register.
834 */
835 IEM_MC_BEGIN(0, 2);
836 IEM_MC_LOCAL(RTUINT256U, uSrc);
837 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
838
839 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
840 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
841 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
842 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
843
844 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
845 IEM_MC_STORE_MEM_U256(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
846
847 IEM_MC_ADVANCE_RIP_AND_FINISH();
848 IEM_MC_END();
849 }
850 return VINF_SUCCESS;
851}
852
853
854FNIEMOP_DEF(iemOp_vmovss_Wss_Hss_Vss)
855{
856 Assert(pVCpu->iem.s.uVexLength <= 1);
857 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
858 if (IEM_IS_MODRM_REG_MODE(bRm))
859 {
860 /**
861 * @opcode 0x11
862 * @oppfx 0xf3
863 * @opcodesub 11 mr/reg
864 * @opcpuid avx
865 * @opgroup og_avx_simdfp_datamerge
866 * @opxcpttype 5
867 * @optest op1=1 op2=0 op3=2 -> op1=2
868 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffea
869 * @optest op1=3 op2=-1 op3=0x77 -> op1=-4294967177
870 * @optest op1=3 op2=0x42 op3=0x77 -> op1=0x4200000077
871 */
872 IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSS, vmovss, Uss_WO, HssHi, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
873 IEMOP_HLP_DONE_VEX_DECODING();
874 IEM_MC_BEGIN(0, 0);
875
876 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
877 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
878 IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm) /*Uss*/,
879 IEM_GET_MODRM_REG(pVCpu, bRm) /*U32*/,
880 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
881 IEM_MC_ADVANCE_RIP_AND_FINISH();
882 IEM_MC_END();
883 }
884 else
885 {
886 /**
887 * @opdone
888 * @opcode 0x11
889 * @oppfx 0xf3
890 * @opcodesub !11 mr/reg
891 * @opcpuid avx
892 * @opgroup og_avx_simdfp_datamove
893 * @opxcpttype 5
894 * @opfunction iemOp_vmovss_Wss_Hss_Vss
895 * @optest op1=1 op2=2 -> op1=2
896 * @optest op1=0 op2=-22 -> op1=-22
897 */
898 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSS, vmovss, Md_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
899 IEM_MC_BEGIN(0, 2);
900 IEM_MC_LOCAL(uint32_t, uSrc);
901 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
902
903 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
904 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
905 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
906 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
907
908 IEM_MC_FETCH_YREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
909 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
910
911 IEM_MC_ADVANCE_RIP_AND_FINISH();
912 IEM_MC_END();
913 }
914
915 return VINF_SUCCESS;
916}
917
918
919FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hsd_Vsd)
920{
921 Assert(pVCpu->iem.s.uVexLength <= 1);
922 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
923 if (IEM_IS_MODRM_REG_MODE(bRm))
924 {
925 /**
926 * @opcode 0x11
927 * @oppfx 0xf2
928 * @opcodesub 11 mr/reg
929 * @opcpuid avx
930 * @opgroup og_avx_simdfp_datamerge
931 * @opxcpttype 5
932 * @optest op1=1 op2=0 op3=2 -> op1=2
933 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
934 * @optest op1=3 op2=-1 op3=0x77 ->
935 * op1=0xffffffffffffffff0000000000000077
936 * @optest op2=0x42 op3=0x77 -> op1=0x420000000000000077
937 */
938 IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSD, vmovsd, Usd_WO, HsdHi, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
939 IEMOP_HLP_DONE_VEX_DECODING();
940 IEM_MC_BEGIN(0, 0);
941
942 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
943 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
944 IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
945 IEM_GET_MODRM_REG(pVCpu, bRm),
946 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
947 IEM_MC_ADVANCE_RIP_AND_FINISH();
948 IEM_MC_END();
949 }
950 else
951 {
952 /**
953 * @opdone
954 * @opcode 0x11
955 * @oppfx 0xf2
956 * @opcodesub !11 mr/reg
957 * @opcpuid avx
958 * @opgroup og_avx_simdfp_datamove
959 * @opxcpttype 5
960 * @opfunction iemOp_vmovsd_Wsd_Hsd_Vsd
961 * @optest op1=1 op2=2 -> op1=2
962 * @optest op1=0 op2=-22 -> op1=-22
963 */
964 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSD, vmovsd, Mq_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
965 IEM_MC_BEGIN(0, 2);
966 IEM_MC_LOCAL(uint64_t, uSrc);
967 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
968
969 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
970 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
971 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
972 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
973
974 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
975 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
976
977 IEM_MC_ADVANCE_RIP_AND_FINISH();
978 IEM_MC_END();
979 }
980
981 return VINF_SUCCESS;
982}
983
984
985FNIEMOP_DEF(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps)
986{
987 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
988 if (IEM_IS_MODRM_REG_MODE(bRm))
989 {
990 /**
991 * @opcode 0x12
992 * @opcodesub 11 mr/reg
993 * @oppfx none
994 * @opcpuid avx
995 * @opgroup og_avx_simdfp_datamerge
996 * @opxcpttype 7LZ
997 * @optest op2=0x2200220122022203
998 * op3=0x3304330533063307
999 * -> op1=0x22002201220222033304330533063307
1000 * @optest op2=-1 op3=-42 -> op1=-42
1001 * @note op3 and op2 are only the 8-byte high XMM register halves.
1002 */
1003 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVHLPS, vmovhlps, Vq_WO, HqHi, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1004
1005 IEMOP_HLP_DONE_VEX_DECODING_L0();
1006 IEM_MC_BEGIN(0, 0);
1007
1008 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1009 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1010 IEM_MC_MERGE_YREG_U64HI_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1011 IEM_GET_MODRM_RM(pVCpu, bRm),
1012 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1013
1014 IEM_MC_ADVANCE_RIP_AND_FINISH();
1015 IEM_MC_END();
1016 }
1017 else
1018 {
1019 /**
1020 * @opdone
1021 * @opcode 0x12
1022 * @opcodesub !11 mr/reg
1023 * @oppfx none
1024 * @opcpuid avx
1025 * @opgroup og_avx_simdfp_datamove
1026 * @opxcpttype 5LZ
1027 * @opfunction iemOp_vmovlps_Vq_Hq_Mq__vmovhlps
1028 * @optest op1=1 op2=0 op3=0 -> op1=0
1029 * @optest op1=0 op2=-1 op3=-1 -> op1=-1
1030 * @optest op1=1 op2=2 op3=3 -> op1=0x20000000000000003
1031 * @optest op2=-1 op3=0x42 -> op1=0xffffffffffffffff0000000000000042
1032 */
1033 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPS, vmovlps, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1034
1035 IEM_MC_BEGIN(0, 2);
1036 IEM_MC_LOCAL(uint64_t, uSrc);
1037 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1038
1039 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1040 IEMOP_HLP_DONE_VEX_DECODING_L0();
1041 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1042 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1043
1044 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1045 IEM_MC_MERGE_YREG_U64LOCAL_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1046 uSrc,
1047 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1048
1049 IEM_MC_ADVANCE_RIP_AND_FINISH();
1050 IEM_MC_END();
1051 }
1052 return VINF_SUCCESS;
1053}
1054
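/*
 * Worked example for the register form above (illustration): vmovhlps
 * copies the high qword of Uq into the destination's low qword and the
 * high qword of Hq into the destination's high qword, which is how the
 * @optest inputs 0x2200220122022203 (op2/Hq) and 0x3304330533063307
 * (op3/Uq) land as op1 = 0x22002201220222033304330533063307.
 */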
1055
1056/**
1057 * @opcode 0x12
1058 * @opcodesub !11 mr/reg
1059 * @oppfx 0x66
1060 * @opcpuid avx
1061 * @opgroup og_avx_pcksclr_datamerge
1062 * @opxcpttype 5LZ
1063 * @optest op2=0 op3=2 -> op1=2
1064 * @optest op2=0x22 op3=0x33 -> op1=0x220000000000000033
1065 * @optest op2=0xfffffff0fffffff1 op3=0xeeeeeee8eeeeeee9
1066 * -> op1=0xfffffff0fffffff1eeeeeee8eeeeeee9
1067 */
1068FNIEMOP_DEF(iemOp_vmovlpd_Vq_Hq_Mq)
1069{
1070 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1071 if (IEM_IS_MODRM_MEM_MODE(bRm))
1072 {
1073 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPD, vmovlpd, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1074
1075 IEM_MC_BEGIN(0, 2);
1076 IEM_MC_LOCAL(uint64_t, uSrc);
1077 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1078
1079 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1080 IEMOP_HLP_DONE_VEX_DECODING_L0();
1081 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1082 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1083
1084 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1085 IEM_MC_MERGE_YREG_U64LOCAL_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1086 uSrc,
1087 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1088
1089 IEM_MC_ADVANCE_RIP_AND_FINISH();
1090 IEM_MC_END();
1091 return VINF_SUCCESS;
1092 }
1093
1094 /**
1095 * @opdone
1096 * @opmnemonic udvex660f12m3
1097 * @opcode 0x12
1098 * @opcodesub 11 mr/reg
1099 * @oppfx 0x66
1100 * @opunused immediate
1101 * @opcpuid avx
1102 * @optest ->
1103 */
1104 return IEMOP_RAISE_INVALID_OPCODE();
1105}
1106
1107
1108/**
1109 * @opcode 0x12
1110 * @oppfx 0xf3
1111 * @opcpuid avx
1112 * @opgroup og_avx_pcksclr_datamove
1113 * @opxcpttype 4
1114 * @optest vex.l==0 / op1=-1 op2=0xdddddddd00000002eeeeeeee00000001
1115 * -> op1=0x00000002000000020000000100000001
1116 * @optest vex.l==1 /
1117 * op2=0xbbbbbbbb00000004cccccccc00000003dddddddd00000002eeeeeeee00000001
1118 * -> op1=0x0000000400000004000000030000000300000002000000020000000100000001
1119 */
1120FNIEMOP_DEF(iemOp_vmovsldup_Vx_Wx)
1121{
1122 IEMOP_MNEMONIC2(VEX_RM, VMOVSLDUP, vmovsldup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1123 Assert(pVCpu->iem.s.uVexLength <= 1);
1124 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1125 if (IEM_IS_MODRM_REG_MODE(bRm))
1126 {
1127 /*
1128 * Register, register.
1129 */
1130 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1131 if (pVCpu->iem.s.uVexLength == 0)
1132 {
1133 IEM_MC_BEGIN(2, 0);
1134 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1135 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1136
1137 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1138 IEM_MC_PREPARE_AVX_USAGE();
1139
1140 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1141 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1142 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
1143 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1144
1145 IEM_MC_ADVANCE_RIP_AND_FINISH();
1146 IEM_MC_END();
1147 }
1148 else
1149 {
1150 IEM_MC_BEGIN(3, 0);
1151 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
1152 IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
1153 IEM_MC_ARG_CONST(uint8_t, iYRegSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 2);
1154
1155 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1156 IEM_MC_PREPARE_AVX_USAGE();
1157 IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovsldup_256_rr, iYRegDst, iYRegSrc);
1158
1159 IEM_MC_ADVANCE_RIP_AND_FINISH();
1160 IEM_MC_END();
1161 }
1162 }
1163 else
1164 {
1165 /*
1166 * Register, memory.
1167 */
1168 if (pVCpu->iem.s.uVexLength == 0)
1169 {
1170 IEM_MC_BEGIN(2, 2);
1171 IEM_MC_LOCAL(RTUINT128U, uSrc);
1172 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1173 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1174 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1175
1176 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1177 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1178 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1179 IEM_MC_PREPARE_AVX_USAGE();
1180
1181 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1182 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1183 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
1184 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1185
1186 IEM_MC_ADVANCE_RIP_AND_FINISH();
1187 IEM_MC_END();
1188 }
1189 else
1190 {
1191 IEM_MC_BEGIN(3, 2);
1192 IEM_MC_LOCAL(RTUINT256U, uSrc);
1193 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1194 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
1195 IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
1196 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 2);
1197
1198 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1199 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1200 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1201 IEM_MC_PREPARE_AVX_USAGE();
1202
1203 IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1204 IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovsldup_256_rm, iYRegDst, puSrc);
1205
1206 IEM_MC_ADVANCE_RIP_AND_FINISH();
1207 IEM_MC_END();
1208 }
1209 }
1210 return VINF_SUCCESS;
1211}
1212
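/*
 * Decoding the first @optest above (illustration): vmovsldup duplicates
 * the even (low) dword of each dword pair, so the 128-bit source dwords
 * {0xdddddddd, 0x00000002, 0xeeeeeeee, 0x00000001} (high to low) become
 * {0x00000002, 0x00000002, 0x00000001, 0x00000001}.
 */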
1213
1214/**
1215 * @opcode 0x12
1216 * @oppfx 0xf2
1217 * @opcpuid avx
1218 * @opgroup og_avx_pcksclr_datamove
1219 * @opxcpttype 5
1220 * @optest vex.l==0 / op2=0xddddddddeeeeeeee2222222211111111
1221 * -> op1=0x22222222111111112222222211111111
1222 * @optest vex.l==1 / op2=0xbbbbbbbbcccccccc4444444433333333ddddddddeeeeeeee2222222211111111
1223 * -> op1=0x4444444433333333444444443333333322222222111111112222222211111111
1224 */
1225FNIEMOP_DEF(iemOp_vmovddup_Vx_Wx)
1226{
1227 IEMOP_MNEMONIC2(VEX_RM, VMOVDDUP, vmovddup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1228 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1229 if (IEM_IS_MODRM_REG_MODE(bRm))
1230 {
1231 /*
1232 * Register, register.
1233 */
1234 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1235 if (pVCpu->iem.s.uVexLength == 0)
1236 {
1237 IEM_MC_BEGIN(2, 0);
1238 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1239 IEM_MC_ARG(uint64_t, uSrc, 1);
1240
1241 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1242 IEM_MC_PREPARE_AVX_USAGE();
1243
1244 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1245 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1246 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1247 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1248
1249 IEM_MC_ADVANCE_RIP_AND_FINISH();
1250 IEM_MC_END();
1251 }
1252 else
1253 {
1254 IEM_MC_BEGIN(3, 0);
1255 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
1256 IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
1257 IEM_MC_ARG_CONST(uint8_t, iYRegSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 2);
1258
1259 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1260 IEM_MC_PREPARE_AVX_USAGE();
1261 IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovddup_256_rr, iYRegDst, iYRegSrc);
1262
1263 IEM_MC_ADVANCE_RIP_AND_FINISH();
1264 IEM_MC_END();
1265 }
1266 }
1267 else
1268 {
1269 /*
1270 * Register, memory.
1271 */
1272 if (pVCpu->iem.s.uVexLength == 0)
1273 {
1274 IEM_MC_BEGIN(2, 2);
1275 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1276 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1277 IEM_MC_ARG(uint64_t, uSrc, 1);
1278
1279 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1280 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1281 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1282 IEM_MC_PREPARE_AVX_USAGE();
1283
1284 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1285 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1286 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
1287 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1288
1289 IEM_MC_ADVANCE_RIP_AND_FINISH();
1290 IEM_MC_END();
1291 }
1292 else
1293 {
1294 IEM_MC_BEGIN(3, 2);
1295 IEM_MC_LOCAL(RTUINT256U, uSrc);
1296 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1297 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
1298 IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
1299 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 2);
1300
1301 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1302 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1303 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1304 IEM_MC_PREPARE_AVX_USAGE();
1305
1306 IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1307 IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovddup_256_rm, iYRegDst, puSrc);
1308
1309 IEM_MC_ADVANCE_RIP_AND_FINISH();
1310 IEM_MC_END();
1311 }
1312 }
1313 return VINF_SUCCESS;
1314}
1315
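/*
 * Decoding the vex.l==0 @optest above (illustration): vmovddup duplicates
 * the low qword of each 128-bit lane, so 0xddddddddeeeeeeee2222222211111111
 * becomes 0x22222222111111112222222211111111.
 */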
1316
1317/**
1318 * @opcode 0x13
1319 * @opcodesub !11 mr/reg
1320 * @oppfx none
1321 * @opcpuid avx
1322 * @opgroup og_avx_simdfp_datamove
1323 * @opxcpttype 5
1324 * @optest op1=1 op2=2 -> op1=2
1325 * @optest op1=0 op2=-42 -> op1=-42
1326 */
1327FNIEMOP_DEF(iemOp_vmovlps_Mq_Vq)
1328{
1329 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1330 if (IEM_IS_MODRM_MEM_MODE(bRm))
1331 {
1332 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPS, vmovlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1333
1334 IEM_MC_BEGIN(0, 2);
1335 IEM_MC_LOCAL(uint64_t, uSrc);
1336 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1337
1338 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1339 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
1340 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1341 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1342
1343 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
1344 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1345
1346 IEM_MC_ADVANCE_RIP_AND_FINISH();
1347 IEM_MC_END();
1348 return VINF_SUCCESS;
1349 }
1350
1351 /**
1352 * @opdone
1353 * @opmnemonic udvex0f13m3
1354 * @opcode 0x13
1355 * @opcodesub 11 mr/reg
1356 * @oppfx none
1357 * @opunused immediate
1358 * @opcpuid avx
1359 * @optest ->
1360 */
1361 return IEMOP_RAISE_INVALID_OPCODE();
1362}
1363
1364
1365/**
1366 * @opcode 0x13
1367 * @opcodesub !11 mr/reg
1368 * @oppfx 0x66
1369 * @opcpuid avx
1370 * @opgroup og_avx_pcksclr_datamove
1371 * @opxcpttype 5
1372 * @optest op1=1 op2=2 -> op1=2
1373 * @optest op1=0 op2=-42 -> op1=-42
1374 */
1375FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
1376{
1377 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1378 if (IEM_IS_MODRM_MEM_MODE(bRm))
1379 {
1380 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPD, vmovlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1381 IEM_MC_BEGIN(0, 2);
1382 IEM_MC_LOCAL(uint64_t, uSrc);
1383 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1384
1385 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1386 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
1387 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1388 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1389
1390 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
1391 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1392
1393 IEM_MC_ADVANCE_RIP_AND_FINISH();
1394 IEM_MC_END();
1395 return VINF_SUCCESS;
1396 }
1397
1398 /**
1399 * @opdone
1400 * @opmnemonic udvex660f13m3
1401 * @opcode 0x13
1402 * @opcodesub 11 mr/reg
1403 * @oppfx 0x66
1404 * @opunused immediate
1405 * @opcpuid avx
1406 * @optest ->
1407 */
1408 return IEMOP_RAISE_INVALID_OPCODE();
1409}
1410
1411/* Opcode VEX.F3.0F 0x13 - invalid */
1412/* Opcode VEX.F2.0F 0x13 - invalid */
1413
1414/* Opcode VEX.0F 0x14 - vunpcklps Vx, Hx, Wx */
1415FNIEMOP_DEF(iemOp_vunpcklps_Vx_Hx_Wx)
1416{
1417 IEMOP_MNEMONIC3(VEX_RVM, VUNPCKLPS, vunpcklps, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
1418 IEMOPMEDIAOPTF3_INIT_VARS( vunpcklps);
1419 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1420}
1421
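/*
 * Sketch of what the INIT_VARS/SELECT pattern above expands to,
 * reconstructed from usage (the exact macro text lives in the IEM headers,
 * so treat the details as assumptions): IEMOPMEDIAOPTF3_INIT_VARS(vunpcklps)
 * declares two static tables along the lines of
 *
 *  static IEMOPMEDIAOPTF3 const s_Host =
 *      { iemAImpl_vunpcklps_u128,          iemAImpl_vunpcklps_u256 };
 *  static IEMOPMEDIAOPTF3 const s_Fallback =
 *      { iemAImpl_vunpcklps_u128_fallback, iemAImpl_vunpcklps_u256_fallback };
 *
 * and IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback) picks the
 * host table when the host CPU can execute the instruction natively.
 */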
1422
1423/** Opcode VEX.66.0F 0x14 - vunpcklpd Vx,Hx,Wx */
1424FNIEMOP_DEF(iemOp_vunpcklpd_Vx_Hx_Wx)
1425{
1426 IEMOP_MNEMONIC3(VEX_RVM, VUNPCKLPD, vunpcklpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
1427 IEMOPMEDIAOPTF3_INIT_VARS( vunpcklpd);
1428 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1429}
1430
1431
1432/* Opcode VEX.F3.0F 0x14 - invalid */
1433/* Opcode VEX.F2.0F 0x14 - invalid */
1434
1435
1436/** Opcode VEX.0F 0x15 - vunpckhps Vx, Hx, Wx */
1437FNIEMOP_DEF(iemOp_vunpckhps_Vx_Hx_Wx)
1438{
1439 IEMOP_MNEMONIC3(VEX_RVM, VUNPCKHPS, vunpckhps, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
1440 IEMOPMEDIAOPTF3_INIT_VARS( vunpckhps);
1441 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1442}
1443
1444
1445/** Opcode VEX.66.0F 0x15 - vunpckhpd Vx,Hx,Wx */
1446FNIEMOP_DEF(iemOp_vunpckhpd_Vx_Hx_Wx)
1447{
1448 IEMOP_MNEMONIC3(VEX_RVM, VUNPCKHPD, vunpckhpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
1449 IEMOPMEDIAOPTF3_INIT_VARS( vunpckhpd);
1450 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1451}
1452
1453
1454/* Opcode VEX.F3.0F 0x15 - invalid */
1455/* Opcode VEX.F2.0F 0x15 - invalid */
1456
1457
1458FNIEMOP_DEF(iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq)
1459{
1460 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1461 if (IEM_IS_MODRM_REG_MODE(bRm))
1462 {
1463 /**
1464 * @opcode 0x16
1465 * @opcodesub 11 mr/reg
1466 * @oppfx none
1467 * @opcpuid avx
1468 * @opgroup og_avx_simdfp_datamerge
1469 * @opxcpttype 7LZ
1470 */
1471 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVLHPS, vmovlhps, Vq_WO, Hq, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1472
1473 IEMOP_HLP_DONE_VEX_DECODING_L0();
1474 IEM_MC_BEGIN(0, 0);
1475
1476 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1477 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1478 IEM_MC_MERGE_YREG_U64LO_U64LO_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1479 IEM_GET_MODRM_RM(pVCpu, bRm),
1480 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1481
1482 IEM_MC_ADVANCE_RIP_AND_FINISH();
1483 IEM_MC_END();
1484 }
1485 else
1486 {
1487 /**
1488 * @opdone
1489 * @opcode 0x16
1490 * @opcodesub !11 mr/reg
1491 * @oppfx none
1492 * @opcpuid avx
1493 * @opgroup og_avx_simdfp_datamove
1494 * @opxcpttype 5LZ
1495 * @opfunction iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq
1496 */
1497 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVHPS, vmovhps, Vq_WO, Hq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1498
1499 IEM_MC_BEGIN(0, 2);
1500 IEM_MC_LOCAL(uint64_t, uSrc);
1501 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1502
1503 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1504 IEMOP_HLP_DONE_VEX_DECODING_L0();
1505 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1506 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1507
1508 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1509 IEM_MC_MERGE_YREG_U64LO_U64LOCAL_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1510 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/,
1511 uSrc);
1512
1513 IEM_MC_ADVANCE_RIP_AND_FINISH();
1514 IEM_MC_END();
1515 }
1516 return VINF_SUCCESS;
1517}
1518
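/*
 * Illustration for the register form above: vmovlhps is the converse of
 * vmovhlps - the destination's low qword comes from Hq's low qword and its
 * high qword from Uq's low qword, with ymm bits 255:128 zeroed.
 */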
1519
1520/**
1521 * @opcode 0x16
1522 * @opcodesub !11 mr/reg
1523 * @oppfx 0x66
1524 * @opcpuid avx
1525 * @opgroup og_avx_pcksclr_datamerge
1526 * @opxcpttype 5LZ
1527 */
1528FNIEMOP_DEF(iemOp_vmovhpd_Vdq_Hq_Mq)
1529{
1530 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1531 if (IEM_IS_MODRM_MEM_MODE(bRm))
1532 {
1533 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVHPD, vmovhpd, Vq_WO, Hq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1534
1535 IEM_MC_BEGIN(0, 2);
1536 IEM_MC_LOCAL(uint64_t, uSrc);
1537 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1538
1539 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1540 IEMOP_HLP_DONE_VEX_DECODING_L0();
1541 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1542 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1543
1544 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1545 IEM_MC_MERGE_YREG_U64LO_U64LOCAL_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1546 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/,
1547 uSrc);
1548
1549 IEM_MC_ADVANCE_RIP_AND_FINISH();
1550 IEM_MC_END();
1551 return VINF_SUCCESS;
1552 }
1553
1554 /**
1555 * @opdone
1556 * @opmnemonic udvex660f16m3
1557 * @opcode 0x16
1558 * @opcodesub 11 mr/reg
1559 * @oppfx 0x66
1560 * @opunused immediate
1561 * @opcpuid avx
1562 * @optest ->
1563 */
1564 return IEMOP_RAISE_INVALID_OPCODE();
1565}
1566
1567
1568/** Opcode VEX.F3.0F 0x16 - vmovshdup Vx, Wx */
1569/**
1570 * @opcode 0x16
1571 * @oppfx 0xf3
1572 * @opcpuid avx
1573 * @opgroup og_avx_pcksclr_datamove
1574 * @opxcpttype 4
1575 */
1576FNIEMOP_DEF(iemOp_vmovshdup_Vx_Wx)
1577{
1578 IEMOP_MNEMONIC2(VEX_RM, VMOVSHDUP, vmovshdup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1579 Assert(pVCpu->iem.s.uVexLength <= 1);
1580 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1581 if (IEM_IS_MODRM_REG_MODE(bRm))
1582 {
1583 /*
1584 * Register, register.
1585 */
1586 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1587 if (pVCpu->iem.s.uVexLength == 0)
1588 {
1589 IEM_MC_BEGIN(2, 0);
1590 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1591 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1592
1593 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1594 IEM_MC_PREPARE_AVX_USAGE();
1595
1596 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1597 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1598 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
1599 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1600
1601 IEM_MC_ADVANCE_RIP_AND_FINISH();
1602 IEM_MC_END();
1603 }
1604 else
1605 {
1606 IEM_MC_BEGIN(3, 0);
1607 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
1608 IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
1609 IEM_MC_ARG_CONST(uint8_t, iYRegSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 2);
1610
1611 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1612 IEM_MC_PREPARE_AVX_USAGE();
1613 IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovshdup_256_rr, iYRegDst, iYRegSrc);
1614
1615 IEM_MC_ADVANCE_RIP_AND_FINISH();
1616 IEM_MC_END();
1617 }
1618 }
1619 else
1620 {
1621 /*
1622 * Register, memory.
1623 */
1624 if (pVCpu->iem.s.uVexLength == 0)
1625 {
1626 IEM_MC_BEGIN(2, 2);
1627 IEM_MC_LOCAL(RTUINT128U, uSrc);
1628 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1629 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1630 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1631
1632 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1633 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1634 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1635 IEM_MC_PREPARE_AVX_USAGE();
1636
1637 IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1638 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1639 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
1640 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1641
1642 IEM_MC_ADVANCE_RIP_AND_FINISH();
1643 IEM_MC_END();
1644 }
1645 else
1646 {
1647 IEM_MC_BEGIN(3, 2);
1648 IEM_MC_LOCAL(RTUINT256U, uSrc);
1649 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1650 IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
1651 IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
1652 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 2);
1653
1654 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1655 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1656 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1657 IEM_MC_PREPARE_AVX_USAGE();
1658
1659 IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1660 IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovshdup_256_rm, iYRegDst, puSrc);
1661
1662 IEM_MC_ADVANCE_RIP_AND_FINISH();
1663 IEM_MC_END();
1664 }
1665 }
1666 return VINF_SUCCESS;
1667}
1668
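/*
 * Illustration: vmovshdup is the odd-dword counterpart of vmovsldup above -
 * each source dword pair {hi,lo} becomes {hi,hi} in the destination.
 */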
1669
1670/* Opcode VEX.F2.0F 0x16 - invalid */
1671
1672
1673/**
1674 * @opcode 0x17
1675 * @opcodesub !11 mr/reg
1676 * @oppfx none
1677 * @opcpuid avx
1678 * @opgroup og_avx_simdfp_datamove
1679 * @opxcpttype 5
1680 */
1681FNIEMOP_DEF(iemOp_vmovhps_Mq_Vq)
1682{
1683 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1684 if (IEM_IS_MODRM_MEM_MODE(bRm))
1685 {
1686 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVHPS, vmovhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1687
1688 IEM_MC_BEGIN(0, 2);
1689 IEM_MC_LOCAL(uint64_t, uSrc);
1690 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1691
1692 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1693 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
1694 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1695 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1696
1697 IEM_MC_FETCH_YREG_2ND_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
1698 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1699
1700 IEM_MC_ADVANCE_RIP_AND_FINISH();
1701 IEM_MC_END();
1702 return VINF_SUCCESS;
1703 }
1704
1705 /**
1706 * @opdone
1707 * @opmnemonic udvex0f17m3
1708 * @opcode 0x17
1709 * @opcodesub 11 mr/reg
1710 * @oppfx none
1711 * @opunused immediate
1712 * @opcpuid avx
1713 * @optest ->
1714 */
1715 return IEMOP_RAISE_INVALID_OPCODE();
1716}
1717
1718
1719/**
1720 * @opcode 0x17
1721 * @opcodesub !11 mr/reg
1722 * @oppfx 0x66
1723 * @opcpuid avx
1724 * @opgroup og_avx_pcksclr_datamove
1725 * @opxcpttype 5
1726 */
1727FNIEMOP_DEF(iemOp_vmovhpd_Mq_Vq)
1728{
1729 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1730 if (IEM_IS_MODRM_MEM_MODE(bRm))
1731 {
1732 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVHPD, vmovhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1733 IEM_MC_BEGIN(0, 2);
1734 IEM_MC_LOCAL(uint64_t, uSrc);
1735 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1736
1737 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1738 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
1739 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1740 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1741
1742 IEM_MC_FETCH_YREG_2ND_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
1743 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1744
1745 IEM_MC_ADVANCE_RIP_AND_FINISH();
1746 IEM_MC_END();
1747 return VINF_SUCCESS;
1748 }
1749
1750 /**
1751 * @opdone
1752 * @opmnemonic udvex660f17m3
1753 * @opcode 0x17
1754 * @opcodesub 11 mr/reg
1755 * @oppfx 0x66
1756 * @opunused immediate
1757 * @opcpuid avx
1758 * @optest ->
1759 */
1760 return IEMOP_RAISE_INVALID_OPCODE();
1761}
1762
1763
1764/* Opcode VEX.F3.0F 0x17 - invalid */
1765/* Opcode VEX.F2.0F 0x17 - invalid */
1766
1767
1768/* Opcode VEX.0F 0x18 - invalid */
1769/* Opcode VEX.0F 0x19 - invalid */
1770/* Opcode VEX.0F 0x1a - invalid */
1771/* Opcode VEX.0F 0x1b - invalid */
1772/* Opcode VEX.0F 0x1c - invalid */
1773/* Opcode VEX.0F 0x1d - invalid */
1774/* Opcode VEX.0F 0x1e - invalid */
1775/* Opcode VEX.0F 0x1f - invalid */
1776
1777/* Opcode VEX.0F 0x20 - invalid */
1778/* Opcode VEX.0F 0x21 - invalid */
1779/* Opcode VEX.0F 0x22 - invalid */
1780/* Opcode VEX.0F 0x23 - invalid */
1781/* Opcode VEX.0F 0x24 - invalid */
1782/* Opcode VEX.0F 0x25 - invalid */
1783/* Opcode VEX.0F 0x26 - invalid */
1784/* Opcode VEX.0F 0x27 - invalid */
1785
1786/**
1787 * @opcode 0x28
1788 * @oppfx none
1789 * @opcpuid avx
1790 * @opgroup og_avx_pcksclr_datamove
1791 * @opxcpttype 1
1792 * @optest op1=1 op2=2 -> op1=2
1793 * @optest op1=0 op2=-42 -> op1=-42
1794 * @note Almost identical to vmovapd.
1795 */
1796FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
1797{
1798 IEMOP_MNEMONIC2(VEX_RM, VMOVAPS, vmovaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1799 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1800 Assert(pVCpu->iem.s.uVexLength <= 1);
1801 if (IEM_IS_MODRM_REG_MODE(bRm))
1802 {
1803 /*
1804 * Register, register.
1805 */
1806 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1807 IEM_MC_BEGIN(1, 0);
1808
1809 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1810 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1811 if (pVCpu->iem.s.uVexLength == 0)
1812 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1813 IEM_GET_MODRM_RM(pVCpu, bRm));
1814 else
1815 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1816 IEM_GET_MODRM_RM(pVCpu, bRm));
1817 IEM_MC_ADVANCE_RIP_AND_FINISH();
1818 IEM_MC_END();
1819 }
1820 else
1821 {
1822 /*
1823 * Register, memory.
1824 */
1825 if (pVCpu->iem.s.uVexLength == 0)
1826 {
1827 IEM_MC_BEGIN(0, 2);
1828 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1829 IEM_MC_LOCAL(RTUINT128U, uSrc);
1830
1831 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1832 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1833 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1834 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1835
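            /* vmovaps requires an aligned memory operand (exception type 1); the
               ALIGN_SSE fetch raises #GP(0) if the address is misaligned. */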
1836 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1837 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1838
1839 IEM_MC_ADVANCE_RIP_AND_FINISH();
1840 IEM_MC_END();
1841 }
1842 else
1843 {
1844 IEM_MC_BEGIN(0, 2);
1845 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1846 IEM_MC_LOCAL(RTUINT256U, uSrc);
1847
1848 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1849 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1850 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1851 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1852
1853 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1854 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1855
1856 IEM_MC_ADVANCE_RIP_AND_FINISH();
1857 IEM_MC_END();
1858 }
1859 }
1860 return VINF_SUCCESS;
1861}
1862
1863
1864/**
1865 * @opcode 0x28
1866 * @oppfx 66
1867 * @opcpuid avx
1868 * @opgroup og_avx_pcksclr_datamove
1869 * @opxcpttype 1
1870 * @optest op1=1 op2=2 -> op1=2
1871 * @optest op1=0 op2=-42 -> op1=-42
1872 * @note Almost identical to vmovaps.
1873 */
1874FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
1875{
1876 IEMOP_MNEMONIC2(VEX_RM, VMOVAPD, vmovapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1877 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1878 Assert(pVCpu->iem.s.uVexLength <= 1);
1879 if (IEM_IS_MODRM_REG_MODE(bRm))
1880 {
1881 /*
1882 * Register, register.
1883 */
1884 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1885 IEM_MC_BEGIN(1, 0);
1886
1887 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1888 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1889 if (pVCpu->iem.s.uVexLength == 0)
1890 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1891 IEM_GET_MODRM_RM(pVCpu, bRm));
1892 else
1893 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1894 IEM_GET_MODRM_RM(pVCpu, bRm));
1895 IEM_MC_ADVANCE_RIP_AND_FINISH();
1896 IEM_MC_END();
1897 }
1898 else
1899 {
1900 /*
1901 * Register, memory.
1902 */
1903 if (pVCpu->iem.s.uVexLength == 0)
1904 {
1905 IEM_MC_BEGIN(0, 2);
1906 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1907 IEM_MC_LOCAL(RTUINT128U, uSrc);
1908
1909 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1910 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1911 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1912 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1913
1914 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1915 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1916
1917 IEM_MC_ADVANCE_RIP_AND_FINISH();
1918 IEM_MC_END();
1919 }
1920 else
1921 {
1922 IEM_MC_BEGIN(0, 2);
1923 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1924 IEM_MC_LOCAL(RTUINT256U, uSrc);
1925
1926 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1927 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1928 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1929 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1930
1931 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1932 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1933
1934 IEM_MC_ADVANCE_RIP_AND_FINISH();
1935 IEM_MC_END();
1936 }
1937 }
1938 return VINF_SUCCESS;
1939}
1940
1941/**
1942 * @opmnemonic udvexf30f28
1943 * @opcode 0x28
1944 * @oppfx 0xf3
1945 * @opunused vex.modrm
1946 * @opcpuid avx
1947 * @optest ->
1948 * @opdone
1949 */
1950
1951/**
1952 * @opmnemonic udvexf20f28
1953 * @opcode 0x28
1954 * @oppfx 0xf2
1955 * @opunused vex.modrm
1956 * @opcpuid avx
1957 * @optest ->
1958 * @opdone
1959 */
1960
1961/**
1962 * @opcode 0x29
1963 * @oppfx none
1964 * @opcpuid avx
1965 * @opgroup og_avx_pcksclr_datamove
1966 * @opxcpttype 1
1967 * @optest op1=1 op2=2 -> op1=2
1968 * @optest op1=0 op2=-42 -> op1=-42
1969 * @note Almost identical to vmovapd.
1970 */
1971FNIEMOP_DEF(iemOp_vmovaps_Wps_Vps)
1972{
1973 IEMOP_MNEMONIC2(VEX_MR, VMOVAPS, vmovaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1974 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1975 Assert(pVCpu->iem.s.uVexLength <= 1);
1976 if (IEM_IS_MODRM_REG_MODE(bRm))
1977 {
1978 /*
1979 * Register, register.
1980 */
1981 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1982 IEM_MC_BEGIN(1, 0);
1983
1984 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1985 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1986 if (pVCpu->iem.s.uVexLength == 0)
1987 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
1988 IEM_GET_MODRM_REG(pVCpu, bRm));
1989 else
1990 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
1991 IEM_GET_MODRM_REG(pVCpu, bRm));
1992 IEM_MC_ADVANCE_RIP_AND_FINISH();
1993 IEM_MC_END();
1994 }
1995 else
1996 {
1997 /*
1998 * Register, memory.
1999 */
2000 if (pVCpu->iem.s.uVexLength == 0)
2001 {
2002 IEM_MC_BEGIN(0, 2);
2003 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2004 IEM_MC_LOCAL(RTUINT128U, uSrc);
2005
2006 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2007 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2008 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2009 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2010
2011 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2012 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2013
2014 IEM_MC_ADVANCE_RIP_AND_FINISH();
2015 IEM_MC_END();
2016 }
2017 else
2018 {
2019 IEM_MC_BEGIN(0, 2);
2020 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2021 IEM_MC_LOCAL(RTUINT256U, uSrc);
2022
2023 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2024 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2025 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2026 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2027
2028 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2029 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2030
2031 IEM_MC_ADVANCE_RIP_AND_FINISH();
2032 IEM_MC_END();
2033 }
2034 }
2035 return VINF_SUCCESS;
2036}
2037
2038/**
2039 * @opcode 0x29
2040 * @oppfx 66
2041 * @opcpuid avx
2042 * @opgroup og_avx_pcksclr_datamove
2043 * @opxcpttype 1
2044 * @optest op1=1 op2=2 -> op1=2
2045 * @optest op1=0 op2=-42 -> op1=-42
2046 * @note Almost identical to vmovaps.
2047 */
2048FNIEMOP_DEF(iemOp_vmovapd_Wpd_Vpd)
2049{
2050 IEMOP_MNEMONIC2(VEX_MR, VMOVAPD, vmovapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2051 Assert(pVCpu->iem.s.uVexLength <= 1);
2052 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2053 if (IEM_IS_MODRM_REG_MODE(bRm))
2054 {
2055 /*
2056 * Register, register.
2057 */
2058 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2059 IEM_MC_BEGIN(1, 0);
2060
2061 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2062 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2063 if (pVCpu->iem.s.uVexLength == 0)
2064 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2065 IEM_GET_MODRM_REG(pVCpu, bRm));
2066 else
2067 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2068 IEM_GET_MODRM_REG(pVCpu, bRm));
2069 IEM_MC_ADVANCE_RIP_AND_FINISH();
2070 IEM_MC_END();
2071 }
2072 else
2073 {
2074 /*
2075 * Register, memory.
2076 */
2077 if (pVCpu->iem.s.uVexLength == 0)
2078 {
2079 IEM_MC_BEGIN(0, 2);
2080 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2081 IEM_MC_LOCAL(RTUINT128U, uSrc);
2082
2083 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2084 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2085 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2086 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2087
2088 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2089 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2090
2091 IEM_MC_ADVANCE_RIP_AND_FINISH();
2092 IEM_MC_END();
2093 }
2094 else
2095 {
2096 IEM_MC_BEGIN(0, 2);
2097 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2098 IEM_MC_LOCAL(RTUINT256U, uSrc);
2099
2100 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2101 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2102 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2103 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2104
2105 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2106 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2107
2108 IEM_MC_ADVANCE_RIP_AND_FINISH();
2109 IEM_MC_END();
2110 }
2111 }
2112 return VINF_SUCCESS;
2113}
2114
2115
2116/**
2117 * @opmnemonic udvexf30f29
2118 * @opcode 0x29
2119 * @oppfx 0xf3
2120 * @opunused vex.modrm
2121 * @opcpuid avx
2122 * @optest ->
2123 * @opdone
2124 */
2125
2126/**
2127 * @opmnemonic udvexf20f29
2128 * @opcode 0x29
2129 * @oppfx 0xf2
2130 * @opunused vex.modrm
2131 * @opcpuid avx
2132 * @optest ->
2133 * @opdone
2134 */
2135
2136
2137/** Opcode VEX.0F 0x2a - invalid */
2138/** Opcode VEX.66.0F 0x2a - invalid */
2139/** Opcode VEX.F3.0F 0x2a - vcvtsi2ss Vss, Hss, Ey */
2140FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey);
2141/** Opcode VEX.F2.0F 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
2142FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey);
2143
2144
2145/**
2146 * @opcode 0x2b
2147 * @opcodesub !11 mr/reg
2148 * @oppfx none
2149 * @opcpuid avx
2150 * @opgroup og_avx_cachect
2151 * @opxcpttype 1
2152 * @optest op1=1 op2=2 -> op1=2
2153 * @optest op1=0 op2=-42 -> op1=-42
2154 * @note Identical implementation to vmovntpd.
2155 */
2156FNIEMOP_DEF(iemOp_vmovntps_Mps_Vps)
2157{
2158 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPS, vmovntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2159 Assert(pVCpu->iem.s.uVexLength <= 1);
2160 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2161 if (IEM_IS_MODRM_MEM_MODE(bRm))
2162 {
2163 /*
2164 * Memory, register.
2165 */
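        /* Non-temporal hint: the store is intended to bypass the caches, but the
           usual 16/32 byte alignment requirement still applies. */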
2166 if (pVCpu->iem.s.uVexLength == 0)
2167 {
2168 IEM_MC_BEGIN(0, 2);
2169 IEM_MC_LOCAL(RTUINT128U, uSrc);
2170 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2171
2172 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2173 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2174 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2175 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2176
2177 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2178 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2179
2180 IEM_MC_ADVANCE_RIP_AND_FINISH();
2181 IEM_MC_END();
2182 }
2183 else
2184 {
2185 IEM_MC_BEGIN(0, 2);
2186 IEM_MC_LOCAL(RTUINT256U, uSrc);
2187 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2188
2189 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2190 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2191 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2192 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2193
2194 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2195 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2196
2197 IEM_MC_ADVANCE_RIP_AND_FINISH();
2198 IEM_MC_END();
2199 }
2200 }
2201 /* The register, register encoding is invalid. */
2202 else
2203 return IEMOP_RAISE_INVALID_OPCODE();
2204 return VINF_SUCCESS;
2205}
2206
2207/**
2208 * @opcode 0x2b
2209 * @opcodesub !11 mr/reg
2210 * @oppfx 0x66
2211 * @opcpuid avx
2212 * @opgroup og_avx_cachect
2213 * @opxcpttype 1
2214 * @optest op1=1 op2=2 -> op1=2
2215 * @optest op1=0 op2=-42 -> op1=-42
2216 * @note Identical implementation to vmovntps.
2217 */
2218FNIEMOP_DEF(iemOp_vmovntpd_Mpd_Vpd)
2219{
2220 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPD, vmovntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2221 Assert(pVCpu->iem.s.uVexLength <= 1);
2222 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2223 if (IEM_IS_MODRM_MEM_MODE(bRm))
2224 {
2225 /*
2226 * Memory, register.
2227 */
2228 if (pVCpu->iem.s.uVexLength == 0)
2229 {
2230 IEM_MC_BEGIN(0, 2);
2231 IEM_MC_LOCAL(RTUINT128U, uSrc);
2232 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2233
2234 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2235 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2236 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2237 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2238
2239 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2240 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2241
2242 IEM_MC_ADVANCE_RIP_AND_FINISH();
2243 IEM_MC_END();
2244 }
2245 else
2246 {
2247 IEM_MC_BEGIN(0, 2);
2248 IEM_MC_LOCAL(RTUINT256U, uSrc);
2249 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2250
2251 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2252 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2253 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2254 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2255
2256 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2257 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2258
2259 IEM_MC_ADVANCE_RIP_AND_FINISH();
2260 IEM_MC_END();
2261 }
2262 }
2263 /* The register, register encoding is invalid. */
2264 else
2265 return IEMOP_RAISE_INVALID_OPCODE();
2266 return VINF_SUCCESS;
2267}
2268
2269/**
2270 * @opmnemonic udvexf30f2b
2271 * @opcode 0x2b
2272 * @oppfx 0xf3
2273 * @opunused vex.modrm
2274 * @opcpuid avx
2275 * @optest ->
2276 * @opdone
2277 */
2278
2279/**
2280 * @opmnemonic udvexf20f2b
2281 * @opcode 0x2b
2282 * @oppfx 0xf2
2283 * @opunused vex.modrm
2284 * @opcpuid avx
2285 * @optest ->
2286 * @opdone
2287 */
2288
2289
2290/* Opcode VEX.0F 0x2c - invalid */
2291/* Opcode VEX.66.0F 0x2c - invalid */
2292/** Opcode VEX.F3.0F 0x2c - vcvttss2si Gy, Wss */
2293FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
2294/** Opcode VEX.F2.0F 0x2c - vcvttsd2si Gy, Wsd */
2295FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);
2296
2297/* Opcode VEX.0F 0x2d - invalid */
2298/* Opcode VEX.66.0F 0x2d - invalid */
2299/** Opcode VEX.F3.0F 0x2d - vcvtss2si Gy, Wss */
2300FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
2301/** Opcode VEX.F2.0F 0x2d - vcvtsd2si Gy, Wsd */
2302FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);
2303
2304
2305/** Opcode VEX.0F 0x2e - vucomiss Vss, Wss */
2306FNIEMOP_DEF(iemOp_vucomiss_Vss_Wss)
2307{
2308 IEMOP_MNEMONIC2(RM, VUCOMISS, vucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
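    /* Unordered compare of the low single-precision values; ZF/PF/CF reflect the
       result (all three set for unordered), OF/SF/AF are cleared, and only an
       SNaN operand signals #IA. */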
2309 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2310 if (IEM_IS_MODRM_REG_MODE(bRm))
2311 {
2312 /*
2313 * Register, register.
2314 */
2315 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2316 IEM_MC_BEGIN(4, 1);
2317 IEM_MC_LOCAL(uint32_t, fEFlags);
2318 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2319 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2320 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2321 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
2322 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2323 IEM_MC_PREPARE_AVX_USAGE();
2324 IEM_MC_FETCH_EFLAGS(fEFlags);
2325 IEM_MC_REF_MXCSR(pfMxcsr);
2326 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2327 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2328 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomiss_u128, iemAImpl_vucomiss_u128_fallback),
2329 pfMxcsr, pEFlags, puSrc1, puSrc2);
2330 IEM_MC_IF_MXCSR_XCPT_PENDING()
2331 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2332 IEM_MC_ELSE()
2333 IEM_MC_COMMIT_EFLAGS(fEFlags);
2334 IEM_MC_ENDIF();
2335
2336 IEM_MC_ADVANCE_RIP_AND_FINISH();
2337 IEM_MC_END();
2338 }
2339 else
2340 {
2341 /*
2342 * Register, memory.
2343 */
2344 IEM_MC_BEGIN(4, 3);
2345 IEM_MC_LOCAL(uint32_t, fEFlags);
2346 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2347 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2348 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2349 IEM_MC_LOCAL(X86XMMREG, uSrc2);
2350 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
2351 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2352
2353 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2354 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2355 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2356 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2357
2358 IEM_MC_PREPARE_AVX_USAGE();
2359 IEM_MC_FETCH_EFLAGS(fEFlags);
2360 IEM_MC_REF_MXCSR(pfMxcsr);
2361 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2362 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomiss_u128, iemAImpl_vucomiss_u128_fallback),
2363 pfMxcsr, pEFlags, puSrc1, puSrc2);
2364 IEM_MC_IF_MXCSR_XCPT_PENDING()
2365 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2366 IEM_MC_ELSE()
2367 IEM_MC_COMMIT_EFLAGS(fEFlags);
2368 IEM_MC_ENDIF();
2369
2370 IEM_MC_ADVANCE_RIP_AND_FINISH();
2371 IEM_MC_END();
2372 }
2373 return VINF_SUCCESS;
2374}
2375
2376
2377/** Opcode VEX.66.0F 0x2e - vucomisd Vsd, Wsd */
2378FNIEMOP_DEF(iemOp_vucomisd_Vsd_Wsd)
2379{
2380 IEMOP_MNEMONIC2(RM, VUCOMISD, vucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2381 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2382 if (IEM_IS_MODRM_REG_MODE(bRm))
2383 {
2384 /*
2385 * Register, register.
2386 */
2387 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2388 IEM_MC_BEGIN(4, 1);
2389 IEM_MC_LOCAL(uint32_t, fEFlags);
2390 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2391 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2392 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2393 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
2394 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2395 IEM_MC_PREPARE_AVX_USAGE();
2396 IEM_MC_FETCH_EFLAGS(fEFlags);
2397 IEM_MC_REF_MXCSR(pfMxcsr);
2398 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2399 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2400 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomisd_u128, iemAImpl_vucomisd_u128_fallback),
2401 pfMxcsr, pEFlags, puSrc1, puSrc2);
2402 IEM_MC_IF_MXCSR_XCPT_PENDING()
2403 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2404 IEM_MC_ELSE()
2405 IEM_MC_COMMIT_EFLAGS(fEFlags);
2406 IEM_MC_ENDIF();
2407
2408 IEM_MC_ADVANCE_RIP_AND_FINISH();
2409 IEM_MC_END();
2410 }
2411 else
2412 {
2413 /*
2414 * Register, memory.
2415 */
2416 IEM_MC_BEGIN(4, 3);
2417 IEM_MC_LOCAL(uint32_t, fEFlags);
2418 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2419 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2420 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2421 IEM_MC_LOCAL(X86XMMREG, uSrc2);
2422 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
2423 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2424
2425 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2426 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2427 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2428        IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2429
2430 IEM_MC_PREPARE_AVX_USAGE();
2431 IEM_MC_FETCH_EFLAGS(fEFlags);
2432 IEM_MC_REF_MXCSR(pfMxcsr);
2433 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2434 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomisd_u128, iemAImpl_vucomisd_u128_fallback),
2435 pfMxcsr, pEFlags, puSrc1, puSrc2);
2436 IEM_MC_IF_MXCSR_XCPT_PENDING()
2437 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2438 IEM_MC_ELSE()
2439 IEM_MC_COMMIT_EFLAGS(fEFlags);
2440 IEM_MC_ENDIF();
2441
2442 IEM_MC_ADVANCE_RIP_AND_FINISH();
2443 IEM_MC_END();
2444 }
2445 return VINF_SUCCESS;
2446}
2447
2448
2449/* Opcode VEX.F3.0F 0x2e - invalid */
2450/* Opcode VEX.F2.0F 0x2e - invalid */
2451
2452/** Opcode VEX.0F 0x2f - vcomiss Vss, Wss */
2453FNIEMOP_DEF(iemOp_vcomiss_Vss_Wss)
2454{
2455 IEMOP_MNEMONIC2(RM, VCOMISS, vcomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
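    /* Same flag behaviour as vucomiss, but any NaN operand (quiet or signalling)
       raises #IA. */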
2456 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2457 if (IEM_IS_MODRM_REG_MODE(bRm))
2458 {
2459 /*
2460 * Register, register.
2461 */
2462 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2463 IEM_MC_BEGIN(4, 1);
2464 IEM_MC_LOCAL(uint32_t, fEFlags);
2465 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2466 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2467 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2468 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
2469 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2470 IEM_MC_PREPARE_AVX_USAGE();
2471 IEM_MC_FETCH_EFLAGS(fEFlags);
2472 IEM_MC_REF_MXCSR(pfMxcsr);
2473 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2474 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2475 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomiss_u128, iemAImpl_vcomiss_u128_fallback),
2476 pfMxcsr, pEFlags, puSrc1, puSrc2);
2477 IEM_MC_IF_MXCSR_XCPT_PENDING()
2478 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2479 IEM_MC_ELSE()
2480 IEM_MC_COMMIT_EFLAGS(fEFlags);
2481 IEM_MC_ENDIF();
2482
2483 IEM_MC_ADVANCE_RIP_AND_FINISH();
2484 IEM_MC_END();
2485 }
2486 else
2487 {
2488 /*
2489 * Register, memory.
2490 */
2491 IEM_MC_BEGIN(4, 3);
2492 IEM_MC_LOCAL(uint32_t, fEFlags);
2493 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2494 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2495 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2496 IEM_MC_LOCAL(X86XMMREG, uSrc2);
2497 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
2498 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2499
2500 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2501 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2502 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2503 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2504
2505 IEM_MC_PREPARE_AVX_USAGE();
2506 IEM_MC_FETCH_EFLAGS(fEFlags);
2507 IEM_MC_REF_MXCSR(pfMxcsr);
2508 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2509 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomiss_u128, iemAImpl_vcomiss_u128_fallback),
2510 pfMxcsr, pEFlags, puSrc1, puSrc2);
2511 IEM_MC_IF_MXCSR_XCPT_PENDING()
2512 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2513 IEM_MC_ELSE()
2514 IEM_MC_COMMIT_EFLAGS(fEFlags);
2515 IEM_MC_ENDIF();
2516
2517 IEM_MC_ADVANCE_RIP_AND_FINISH();
2518 IEM_MC_END();
2519 }
2520 return VINF_SUCCESS;
2521}
2522
2523
2524/** Opcode VEX.66.0F 0x2f - vcomisd Vsd, Wsd */
2525FNIEMOP_DEF(iemOp_vcomisd_Vsd_Wsd)
2526{
2527 IEMOP_MNEMONIC2(RM, VCOMISD, vcomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2528 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2529 if (IEM_IS_MODRM_REG_MODE(bRm))
2530 {
2531 /*
2532 * Register, register.
2533 */
2534 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2535 IEM_MC_BEGIN(4, 1);
2536 IEM_MC_LOCAL(uint32_t, fEFlags);
2537 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2538 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2539 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2540 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
2541 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2542 IEM_MC_PREPARE_AVX_USAGE();
2543 IEM_MC_FETCH_EFLAGS(fEFlags);
2544 IEM_MC_REF_MXCSR(pfMxcsr);
2545 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2546 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2547 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomisd_u128, iemAImpl_vcomisd_u128_fallback),
2548 pfMxcsr, pEFlags, puSrc1, puSrc2);
2549 IEM_MC_IF_MXCSR_XCPT_PENDING()
2550 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2551 IEM_MC_ELSE()
2552 IEM_MC_COMMIT_EFLAGS(fEFlags);
2553 IEM_MC_ENDIF();
2554
2555 IEM_MC_ADVANCE_RIP_AND_FINISH();
2556 IEM_MC_END();
2557 }
2558 else
2559 {
2560 /*
2561 * Register, memory.
2562 */
2563 IEM_MC_BEGIN(4, 3);
2564 IEM_MC_LOCAL(uint32_t, fEFlags);
2565 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2566 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2567 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2568 IEM_MC_LOCAL(X86XMMREG, uSrc2);
2569 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
2570 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2571
2572 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2573 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2574 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2575        IEM_MC_FETCH_MEM_XMM_U64(uSrc2, 0 /*a_QWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2576
2577 IEM_MC_PREPARE_AVX_USAGE();
2578 IEM_MC_FETCH_EFLAGS(fEFlags);
2579 IEM_MC_REF_MXCSR(pfMxcsr);
2580 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2581 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomisd_u128, iemAImpl_vcomisd_u128_fallback),
2582 pfMxcsr, pEFlags, puSrc1, puSrc2);
2583 IEM_MC_IF_MXCSR_XCPT_PENDING()
2584 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2585 IEM_MC_ELSE()
2586 IEM_MC_COMMIT_EFLAGS(fEFlags);
2587 IEM_MC_ENDIF();
2588
2589 IEM_MC_ADVANCE_RIP_AND_FINISH();
2590 IEM_MC_END();
2591 }
2592 return VINF_SUCCESS;
2593}
2594
2595
2596/* Opcode VEX.F3.0F 0x2f - invalid */
2597/* Opcode VEX.F2.0F 0x2f - invalid */
2598
2599/* Opcode VEX.0F 0x30 - invalid */
2600/* Opcode VEX.0F 0x31 - invalid */
2601/* Opcode VEX.0F 0x32 - invalid */
2602/* Opcode VEX.0F 0x33 - invalid */
2603/* Opcode VEX.0F 0x34 - invalid */
2604/* Opcode VEX.0F 0x35 - invalid */
2605/* Opcode VEX.0F 0x36 - invalid */
2606/* Opcode VEX.0F 0x37 - invalid */
2607/* Opcode VEX.0F 0x38 - invalid */
2608/* Opcode VEX.0F 0x39 - invalid */
2609/* Opcode VEX.0F 0x3a - invalid */
2610/* Opcode VEX.0F 0x3b - invalid */
2611/* Opcode VEX.0F 0x3c - invalid */
2612/* Opcode VEX.0F 0x3d - invalid */
2613/* Opcode VEX.0F 0x3e - invalid */
2614/* Opcode VEX.0F 0x3f - invalid */
2615/* Opcode VEX.0F 0x40 - invalid */
2616/* Opcode VEX.0F 0x41 - invalid */
2617/* Opcode VEX.0F 0x42 - invalid */
2618/* Opcode VEX.0F 0x43 - invalid */
2619/* Opcode VEX.0F 0x44 - invalid */
2620/* Opcode VEX.0F 0x45 - invalid */
2621/* Opcode VEX.0F 0x46 - invalid */
2622/* Opcode VEX.0F 0x47 - invalid */
2623/* Opcode VEX.0F 0x48 - invalid */
2624/* Opcode VEX.0F 0x49 - invalid */
2625/* Opcode VEX.0F 0x4a - invalid */
2626/* Opcode VEX.0F 0x4b - invalid */
2627/* Opcode VEX.0F 0x4c - invalid */
2628/* Opcode VEX.0F 0x4d - invalid */
2629/* Opcode VEX.0F 0x4e - invalid */
2630/* Opcode VEX.0F 0x4f - invalid */
2631
2632
2633/** Opcode VEX.0F 0x50 - vmovmskps Gy, Ups */
2634FNIEMOP_DEF(iemOp_vmovmskps_Gy_Ups)
2635{
2636 IEMOP_MNEMONIC2(VEX_RM_REG, VMOVMSKPS, vmovmskps, Gd, Ux, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
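    /* Gathers the sign bit of each packed single into the low bits of the
       destination GPR (4 bits for VEX.L=0, 8 bits for VEX.L=1); the remaining
       destination bits are zeroed. */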
2637 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2638 if (IEM_IS_MODRM_REG_MODE(bRm))
2639 {
2640 /*
2641 * Register, register.
2642 */
2643 if (pVCpu->iem.s.uVexLength == 0)
2644 {
2645 IEMOP_HLP_DONE_VEX_DECODING();
2646 IEM_MC_BEGIN(2, 1);
2647 IEM_MC_LOCAL(uint8_t, u8Dst);
2648 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2649 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2650 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2651 IEM_MC_PREPARE_AVX_USAGE();
2652 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2653 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmovmskps_u128, iemAImpl_vmovmskps_u128_fallback),
2654 pu8Dst, puSrc);
2655 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2656 IEM_MC_ADVANCE_RIP_AND_FINISH();
2657 IEM_MC_END();
2658 }
2659 else
2660 {
2661 IEMOP_HLP_DONE_VEX_DECODING();
2662 IEM_MC_BEGIN(2, 2);
2663 IEM_MC_LOCAL(uint8_t, u8Dst);
2664 IEM_MC_LOCAL(RTUINT256U, uSrc);
2665 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2666 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
2667
2668 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
2669 IEM_MC_PREPARE_AVX_USAGE();
2670 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2671 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vmovmskps_u256, iemAImpl_vmovmskps_u256_fallback),
2672 pu8Dst, puSrc);
2673 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2674 IEM_MC_ADVANCE_RIP_AND_FINISH();
2675 IEM_MC_END();
2676 }
2677 return VINF_SUCCESS;
2678 }
2679
2680 /* No memory operand. */
2681 return IEMOP_RAISE_INVALID_OPCODE();
2682}
2683
2684
2685/** Opcode VEX.66.0F 0x50 - vmovmskpd Gy,Upd */
2686FNIEMOP_DEF(iemOp_vmovmskpd_Gy_Upd)
2687{
2689 IEMOP_MNEMONIC2(VEX_RM_REG, VMOVMSKPD, vmovmskpd, Gd, Ux, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
2690 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2691 if (IEM_IS_MODRM_REG_MODE(bRm))
2692 {
2693 /*
2694 * Register, register.
2695 */
2696 if (pVCpu->iem.s.uVexLength == 0)
2697 {
2698 IEMOP_HLP_DONE_VEX_DECODING();
2699 IEM_MC_BEGIN(2, 1);
2700 IEM_MC_LOCAL(uint8_t, u8Dst);
2701 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2702 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2703 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2704 IEM_MC_PREPARE_AVX_USAGE();
2705 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2706 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmovmskpd_u128, iemAImpl_vmovmskpd_u128_fallback),
2707 pu8Dst, puSrc);
2708 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2709 IEM_MC_ADVANCE_RIP_AND_FINISH();
2710 IEM_MC_END();
2711 }
2712 else
2713 {
2714 IEMOP_HLP_DONE_VEX_DECODING();
2715 IEM_MC_BEGIN(2, 2);
2716 IEM_MC_LOCAL(uint8_t, u8Dst);
2717 IEM_MC_LOCAL(RTUINT256U, uSrc);
2718 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2719 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
2720
2721 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
2722 IEM_MC_PREPARE_AVX_USAGE();
2723 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2724 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vmovmskpd_u256, iemAImpl_vmovmskpd_u256_fallback),
2725 pu8Dst, puSrc);
2726 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2727 IEM_MC_ADVANCE_RIP_AND_FINISH();
2728 IEM_MC_END();
2729 }
2730 return VINF_SUCCESS;
2731 }
2732
2733 /* No memory operand. */
2734 return IEMOP_RAISE_INVALID_OPCODE();
2735}
2737
2738
2739/* Opcode VEX.F3.0F 0x50 - invalid */
2740/* Opcode VEX.F2.0F 0x50 - invalid */
2741
2742/** Opcode VEX.0F 0x51 - vsqrtps Vps, Wps */
2743FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps);
2744/** Opcode VEX.66.0F 0x51 - vsqrtpd Vpd, Wpd */
2745FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd);
2746/** Opcode VEX.F3.0F 0x51 - vsqrtss Vss, Hss, Wss */
2747FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss);
2748/** Opcode VEX.F2.0F 0x51 - vsqrtsd Vsd, Hsd, Wsd */
2749FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd);
2750
2751/** Opcode VEX.0F 0x52 - vrsqrtps Vps, Wps */
2752FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
2753/* Opcode VEX.66.0F 0x52 - invalid */
2754/** Opcode VEX.F3.0F 0x52 - vrsqrtss Vss, Hss, Wss */
2755FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
2756/* Opcode VEX.F2.0F 0x52 - invalid */
2757
2758/** Opcode VEX.0F 0x53 - vrcpps Vps, Wps */
2759FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
2760/* Opcode VEX.66.0F 0x53 - invalid */
2761/** Opcode VEX.F3.0F 0x53 - vrcpss Vss, Hss, Wss */
2762FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
2763/* Opcode VEX.F2.0F 0x53 - invalid */
2764
2765
2766/** Opcode VEX.0F 0x54 - vandps Vps, Hps, Wps */
2767FNIEMOP_DEF(iemOp_vandps_Vps_Hps_Wps)
2768{
2769 IEMOP_MNEMONIC3(VEX_RVM, VANDPS, vandps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2770 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2771 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
2772}
2773
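/* Note: the packed-float logical instructions are pure bitwise operations, so
   vandps/vandpd (and the andn/or/xor forms below) reuse the integer
   vpand/vpandn/vpor/vpxor implementations. */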
2774
2775/** Opcode VEX.66.0F 0x54 - vandpd Vpd, Hpd, Wpd */
2776FNIEMOP_DEF(iemOp_vandpd_Vpd_Hpd_Wpd)
2777{
2778 IEMOP_MNEMONIC3(VEX_RVM, VANDPD, vandpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2779 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2780 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
2781}
2782
2783
2784/* Opcode VEX.F3.0F 0x54 - invalid */
2785/* Opcode VEX.F2.0F 0x54 - invalid */
2786
2787
2788/** Opcode VEX.0F 0x55 - vandnps Vps, Hps, Wps */
2789FNIEMOP_DEF(iemOp_vandnps_Vps_Hps_Wps)
2790{
2791 IEMOP_MNEMONIC3(VEX_RVM, VANDNPS, vandnps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2792 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2793 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
2794}
2795
2796
2797/** Opcode VEX.66.0F 0x55 - vandnpd Vpd, Hpd, Wpd */
2798FNIEMOP_DEF(iemOp_vandnpd_Vpd_Hpd_Wpd)
2799{
2800 IEMOP_MNEMONIC3(VEX_RVM, VANDNPD, vandnpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2801 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2802 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
2803}
2804
2805
2806/* Opcode VEX.F3.0F 0x55 - invalid */
2807/* Opcode VEX.F2.0F 0x55 - invalid */
2808
2809/** Opcode VEX.0F 0x56 - vorps Vps, Hps, Wps */
2810FNIEMOP_DEF(iemOp_vorps_Vps_Hps_Wps)
2811{
2812 IEMOP_MNEMONIC3(VEX_RVM, VORPS, vorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2813 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2814 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
2815}
2816
2817
2818/** Opcode VEX.66.0F 0x56 - vorpd Vpd, Hpd, Wpd */
2819FNIEMOP_DEF(iemOp_vorpd_Vpd_Hpd_Wpd)
2820{
2821 IEMOP_MNEMONIC3(VEX_RVM, VORPD, vorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2822 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2823 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
2824}
2825
2826
2827/* Opcode VEX.F3.0F 0x56 - invalid */
2828/* Opcode VEX.F2.0F 0x56 - invalid */
2829
2830
2831/** Opcode VEX.0F 0x57 - vxorps Vps, Hps, Wps */
2832FNIEMOP_DEF(iemOp_vxorps_Vps_Hps_Wps)
2833{
2834 IEMOP_MNEMONIC3(VEX_RVM, VXORPS, vxorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2835 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2836 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
2837}
2838
2839
2840/** Opcode VEX.66.0F 0x57 - vxorpd Vpd, Hpd, Wpd */
2841FNIEMOP_DEF(iemOp_vxorpd_Vpd_Hpd_Wpd)
2842{
2843 IEMOP_MNEMONIC3(VEX_RVM, VXORPD, vxorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2844 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2845 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
2846}
2847
2848
2849/* Opcode VEX.F3.0F 0x57 - invalid */
2850/* Opcode VEX.F2.0F 0x57 - invalid */
2851
2852/** Opcode VEX.0F 0x58 - vaddps Vps, Hps, Wps */
2853FNIEMOP_STUB(iemOp_vaddps_Vps_Hps_Wps);
2854/** Opcode VEX.66.0F 0x58 - vaddpd Vpd, Hpd, Wpd */
2855FNIEMOP_STUB(iemOp_vaddpd_Vpd_Hpd_Wpd);
2856/** Opcode VEX.F3.0F 0x58 - vaddss Vss, Hss, Wss */
2857FNIEMOP_STUB(iemOp_vaddss_Vss_Hss_Wss);
2858/** Opcode VEX.F2.0F 0x58 - vaddsd Vsd, Hsd, Wsd */
2859FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd);
2860
2861/** Opcode VEX.0F 0x59 - vmulps Vps, Hps, Wps */
2862FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps);
2863/** Opcode VEX.66.0F 0x59 - vmulpd Vpd, Hpd, Wpd */
2864FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd);
2865/** Opcode VEX.F3.0F 0x59 - vmulss Vss, Hss, Wss */
2866FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss);
2867/** Opcode VEX.F2.0F 0x59 - vmulsd Vsd, Hsd, Wsd */
2868FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd);
2869
2870/** Opcode VEX.0F 0x5a - vcvtps2pd Vpd, Wps */
2871FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
2872/** Opcode VEX.66.0F 0x5a - vcvtpd2ps Vps, Wpd */
2873FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
2874/** Opcode VEX.F3.0F 0x5a - vcvtss2sd Vsd, Hx, Wss */
2875FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
2876/** Opcode VEX.F2.0F 0x5a - vcvtsd2ss Vss, Hx, Wsd */
2877FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);
2878
2879/** Opcode VEX.0F 0x5b - vcvtdq2ps Vps, Wdq */
2880FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
2881/** Opcode VEX.66.0F 0x5b - vcvtps2dq Vdq, Wps */
2882FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
2883/** Opcode VEX.F3.0F 0x5b - vcvttps2dq Vdq, Wps */
2884FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
2885/* Opcode VEX.F2.0F 0x5b - invalid */
2886
2887/** Opcode VEX.0F 0x5c - vsubps Vps, Hps, Wps */
2888FNIEMOP_STUB(iemOp_vsubps_Vps_Hps_Wps);
2889/** Opcode VEX.66.0F 0x5c - vsubpd Vpd, Hpd, Wpd */
2890FNIEMOP_STUB(iemOp_vsubpd_Vpd_Hpd_Wpd);
2891/** Opcode VEX.F3.0F 0x5c - vsubss Vss, Hss, Wss */
2892FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss);
2893/** Opcode VEX.F2.0F 0x5c - vsubsd Vsd, Hsd, Wsd */
2894FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd);
2895
2896/** Opcode VEX.0F 0x5d - vminps Vps, Hps, Wps */
2897FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps);
2898/** Opcode VEX.66.0F 0x5d - vminpd Vpd, Hpd, Wpd */
2899FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd);
2900/** Opcode VEX.F3.0F 0x5d - vminss Vss, Hss, Wss */
2901FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss);
2902/** Opcode VEX.F2.0F 0x5d - vminsd Vsd, Hsd, Wsd */
2903FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd);
2904
2905/** Opcode VEX.0F 0x5e - vdivps Vps, Hps, Wps */
2906FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps);
2907/** Opcode VEX.66.0F 0x5e - vdivpd Vpd, Hpd, Wpd */
2908FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd);
2909/** Opcode VEX.F3.0F 0x5e - vdivss Vss, Hss, Wss */
2910FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss);
2911/** Opcode VEX.F2.0F 0x5e - vdivsd Vsd, Hsd, Wsd */
2912FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd);
2913
2914/** Opcode VEX.0F 0x5f - vmaxps Vps, Hps, Wps */
2915FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps);
2916/** Opcode VEX.66.0F 0x5f - vmaxpd Vpd, Hpd, Wpd */
2917FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd);
2918/** Opcode VEX.F3.0F 0x5f - vmaxss Vss, Hss, Wss */
2919FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss);
2920/** Opcode VEX.F2.0F 0x5f - vmaxsd Vsd, Hsd, Wsd */
2921FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd);
2922
2923
2924/* Opcode VEX.0F 0x60 - invalid */
2925
2926
2927/** Opcode VEX.66.0F 0x60 - vpunpcklbw Vx, Hx, Wx */
2928FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
2929{
2930 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLBW, vpunpcklbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
2931 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklbw);
2932 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2933}
2934
2935
2936/* Opcode VEX.F3.0F 0x60 - invalid */
2937
2938
2939/* Opcode VEX.0F 0x61 - invalid */
2940
2941
2942/** Opcode VEX.66.0F 0x61 - vpunpcklwd Vx, Hx, Wx */
2943FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
2944{
2945 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLWD, vpunpcklwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
2946 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklwd);
2947 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2948}
2949
2950
2951/* Opcode VEX.F3.0F 0x61 - invalid */
2952
2953
2954/* Opcode VEX.0F 0x62 - invalid */
2955
2956/** Opcode VEX.66.0F 0x62 - vpunpckldq Vx, Hx, Wx */
2957FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
2958{
2959 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLDQ, vpunpckldq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
2960 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckldq);
2961 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2962}
2963
2964
2965/* Opcode VEX.F3.0F 0x62 - invalid */
2966
2967
2968
2969/* Opcode VEX.0F 0x63 - invalid */
2970
2971
2972/** Opcode VEX.66.0F 0x63 - vpacksswb Vx, Hx, Wx */
2973FNIEMOP_DEF(iemOp_vpacksswb_Vx_Hx_Wx)
2974{
2975 IEMOP_MNEMONIC3(VEX_RVM, VPACKSSWB, vpacksswb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
2976 IEMOPMEDIAOPTF3_INIT_VARS( vpacksswb);
2977 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2978}
2979
2980
2981/* Opcode VEX.F3.0F 0x63 - invalid */
2982
2983/* Opcode VEX.0F 0x64 - invalid */
2984
2985
2986/** Opcode VEX.66.0F 0x64 - vpcmpgtb Vx, Hx, Wx */
2987FNIEMOP_DEF(iemOp_vpcmpgtb_Vx_Hx_Wx)
2988{
2989 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTB, vpcmpgtb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
2990 IEMOPMEDIAF3_INIT_VARS( vpcmpgtb);
2991 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2992}
2993
2994
2995/* Opcode VEX.F3.0F 0x64 - invalid */
2996
2997/* Opcode VEX.0F 0x65 - invalid */
2998
2999
3000/** Opcode VEX.66.0F 0x65 - vpcmpgtw Vx, Hx, Wx */
3001FNIEMOP_DEF(iemOp_vpcmpgtw_Vx_Hx_Wx)
3002{
3003 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTW, vpcmpgtw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3004 IEMOPMEDIAF3_INIT_VARS( vpcmpgtw);
3005 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3006}
3007
3008
3009/* Opcode VEX.F3.0F 0x65 - invalid */
3010
3011/* Opcode VEX.0F 0x66 - invalid */
3012
3013
3014/** Opcode VEX.66.0F 0x66 - vpcmpgtd Vx, Hx, Wx */
3015FNIEMOP_DEF(iemOp_vpcmpgtd_Vx_Hx_Wx)
3016{
3017 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTD, vpcmpgtd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3018 IEMOPMEDIAF3_INIT_VARS( vpcmpgtd);
3019 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3020}
3021
3022
3023/* Opcode VEX.F3.0F 0x66 - invalid */
3024
3025/* Opcode VEX.0F 0x67 - invalid */
3026
3027
3028/** Opcode VEX.66.0F 0x67 - vpackuswb Vx, Hx, Wx */
3029FNIEMOP_DEF(iemOp_vpackuswb_Vx_Hx_W)
3030{
3031 IEMOP_MNEMONIC3(VEX_RVM, VPACKUSWB, vpackuswb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
3032 IEMOPMEDIAOPTF3_INIT_VARS( vpackuswb);
3033 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3034}
3035
3036
3037/* Opcode VEX.F3.0F 0x67 - invalid */
3038
3039
3040///**
3041// * Common worker for SSE2 instructions on the form:
3042// * pxxxx xmm1, xmm2/mem128
3043// *
3044// * The 2nd operand is the second half of a register, which in the memory case
3045// * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3046// * where it may read the full 128 bits or only the upper 64 bits.
3047// *
3048// * Exceptions type 4.
3049// */
3050//FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3051//{
3052// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3053// if (IEM_IS_MODRM_REG_MODE(bRm))
3054// {
3055// /*
3056// * Register, register.
3057// */
3058// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3059// IEM_MC_BEGIN(2, 0);
3060// IEM_MC_ARG(PRTUINT128U, pDst, 0);
3061// IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3062// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3063// IEM_MC_PREPARE_SSE_USAGE();
3064// IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3065// IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3066// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3067// IEM_MC_ADVANCE_RIP_AND_FINISH();
3068// IEM_MC_END();
3069// }
3070// else
3071// {
3072// /*
3073// * Register, memory.
3074// */
3075// IEM_MC_BEGIN(2, 2);
3076// IEM_MC_ARG(PRTUINT128U, pDst, 0);
3077// IEM_MC_LOCAL(RTUINT128U, uSrc);
3078// IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3079// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3080//
3081// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3082// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3083// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3084//        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
3085//
3086// IEM_MC_PREPARE_SSE_USAGE();
3087// IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3088// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3089//
3090// IEM_MC_ADVANCE_RIP_AND_FINISH();
3091// IEM_MC_END();
3092// }
3093// return VINF_SUCCESS;
3094//}
3095
3096
3097/* Opcode VEX.0F 0x68 - invalid */
3098
3099/** Opcode VEX.66.0F 0x68 - vpunpckhbw Vx, Hx, Wx */
3100FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
3101{
3102 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHBW, vpunpckhbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
3103 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhbw);
3104 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3105}
3106
3107
3108/* Opcode VEX.F3.0F 0x68 - invalid */
3109
3110
3111/* Opcode VEX.0F 0x69 - invalid */
3112
3113
3114/** Opcode VEX.66.0F 0x69 - vpunpckhwd Vx, Hx, Wx */
3115FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
3116{
3117 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHWD, vpunpckhwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
3118 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhwd);
3119 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3120}
3121
3122
3123/* Opcode VEX.F3.0F 0x69 - invalid */
3124
3125
3126/* Opcode VEX.0F 0x6a - invalid */
3127
3128
3129/** Opcode VEX.66.0F 0x6a - vpunpckhdq Vx, Hx, Wx */
3130FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
3131{
3132 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHDQ, vpunpckhdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
3133 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhdq);
3134 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3135}
3136
3137
3138/* Opcode VEX.F3.0F 0x6a - invalid */
3139
3140
3141/* Opcode VEX.0F 0x6b - invalid */
3142
3143
3144/** Opcode VEX.66.0F 0x6b - vpackssdw Vx, Hx, Wx */
3145FNIEMOP_DEF(iemOp_vpackssdw_Vx_Hx_Wx)
3146{
3147 IEMOP_MNEMONIC3(VEX_RVM, VPACKSSDW, vpackssdw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
3148 IEMOPMEDIAOPTF3_INIT_VARS( vpackssdw);
3149 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3150}
3151
3152
3153/* Opcode VEX.F3.0F 0x6b - invalid */
3154
3155
3156/* Opcode VEX.0F 0x6c - invalid */
3157
3158
3159/** Opcode VEX.66.0F 0x6c - vpunpcklqdq Vx, Hx, Wx */
3160FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
3161{
3162 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLQDQ, vpunpcklqdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
3163 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklqdq);
3164 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3165}
3166
3167
3168/* Opcode VEX.F3.0F 0x6c - invalid */
3169/* Opcode VEX.F2.0F 0x6c - invalid */
3170
3171
3172/* Opcode VEX.0F 0x6d - invalid */
3173
3174
3175/** Opcode VEX.66.0F 0x6d - vpunpckhqdq Vx, Hx, Wx */
3176FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
3177{
3178 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHQDQ, vpunpckhqdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
3179 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhqdq);
3180 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3181}
3182
3183
3184/* Opcode VEX.F3.0F 0x6d - invalid */
3185
3186
3187/* Opcode VEX.0F 0x6e - invalid */
3188
3189FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey)
3190{
3191 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
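    /* The VEX decoder records VEX.W as IEM_OP_PRF_SIZE_REX_W, so this selects
       between the 64-bit (vmovq) and 32-bit (vmovd) forms. */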
3192 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3193 {
3194 /**
3195 * @opcode 0x6e
3196 * @opcodesub rex.w=1
3197 * @oppfx 0x66
3198 * @opcpuid avx
3199 * @opgroup og_avx_simdint_datamov
3200 * @opxcpttype 5
3201 * @optest 64-bit / op1=1 op2=2 -> op1=2
3202 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
3203 */
3204 IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
3205 if (IEM_IS_MODRM_REG_MODE(bRm))
3206 {
3207 /* XMM, greg64 */
3208 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3209 IEM_MC_BEGIN(0, 1);
3210 IEM_MC_LOCAL(uint64_t, u64Tmp);
3211
3212 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3213 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3214
3215 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3216 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3217
3218 IEM_MC_ADVANCE_RIP_AND_FINISH();
3219 IEM_MC_END();
3220 }
3221 else
3222 {
3223 /* XMM, [mem64] */
3224 IEM_MC_BEGIN(0, 2);
3225 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3226 IEM_MC_LOCAL(uint64_t, u64Tmp);
3227
3228 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3229 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3230 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3231 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3232
3233 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3234 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3235
3236 IEM_MC_ADVANCE_RIP_AND_FINISH();
3237 IEM_MC_END();
3238 }
3239 }
3240 else
3241 {
3242 /**
3243 * @opdone
3244 * @opcode 0x6e
3245 * @opcodesub rex.w=0
3246 * @oppfx 0x66
3247 * @opcpuid avx
3248 * @opgroup og_avx_simdint_datamov
3249 * @opxcpttype 5
3250 * @opfunction iemOp_vmovd_q_Vy_Ey
3251 * @optest op1=1 op2=2 -> op1=2
3252 * @optest op1=0 op2=-42 -> op1=-42
3253 */
3254 IEMOP_MNEMONIC2(VEX_RM, VMOVD, vmovd, Vd_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
3255 if (IEM_IS_MODRM_REG_MODE(bRm))
3256 {
3257 /* XMM, greg32 */
3258 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3259 IEM_MC_BEGIN(0, 1);
3260 IEM_MC_LOCAL(uint32_t, u32Tmp);
3261
3262 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3263 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3264
3265 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3266 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3267
3268 IEM_MC_ADVANCE_RIP_AND_FINISH();
3269 IEM_MC_END();
3270 }
3271 else
3272 {
3273 /* XMM, [mem32] */
3274 IEM_MC_BEGIN(0, 2);
3275 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3276 IEM_MC_LOCAL(uint32_t, u32Tmp);
3277
3278 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3279 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3280 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3281 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3282
3283 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3284 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3285
3286 IEM_MC_ADVANCE_RIP_AND_FINISH();
3287 IEM_MC_END();
3288 }
3289 }
3290 return VINF_SUCCESS;
3291}
3292
3293
3294/* Opcode VEX.F3.0F 0x6e - invalid */
3295
3296
3297/* Opcode VEX.0F 0x6f - invalid */
3298
3299/**
3300 * @opcode 0x6f
3301 * @oppfx 0x66
3302 * @opcpuid avx
3303 * @opgroup og_avx_simdint_datamove
3304 * @opxcpttype 1
3305 * @optest op1=1 op2=2 -> op1=2
3306 * @optest op1=0 op2=-42 -> op1=-42
3307 */
3308FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx)
3309{
3310 IEMOP_MNEMONIC2(VEX_RM, VMOVDQA, vmovdqa, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
3311 Assert(pVCpu->iem.s.uVexLength <= 1);
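    /* vmovdqa requires 16/32 byte aligned memory operands; the ALIGN fetches
       below raise #GP(0) otherwise. */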
3312 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3313 if (IEM_IS_MODRM_REG_MODE(bRm))
3314 {
3315 /*
3316 * Register, register.
3317 */
3318 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3319 IEM_MC_BEGIN(0, 0);
3320
3321 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3322 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3323 if (pVCpu->iem.s.uVexLength == 0)
3324 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3325 IEM_GET_MODRM_RM(pVCpu, bRm));
3326 else
3327 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3328 IEM_GET_MODRM_RM(pVCpu, bRm));
3329 IEM_MC_ADVANCE_RIP_AND_FINISH();
3330 IEM_MC_END();
3331 }
3332 else if (pVCpu->iem.s.uVexLength == 0)
3333 {
3334 /*
3335 * Register, memory128.
3336 */
3337 IEM_MC_BEGIN(0, 2);
3338 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3339 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3340
3341 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3342 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3343 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3344 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3345
3346 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3347 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
3348
3349 IEM_MC_ADVANCE_RIP_AND_FINISH();
3350 IEM_MC_END();
3351 }
3352 else
3353 {
3354 /*
3355 * Register, memory256.
3356 */
3357 IEM_MC_BEGIN(0, 2);
3358 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
3359 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3360
3361 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3362 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3363 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3364 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3365
3366 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3367 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
3368
3369 IEM_MC_ADVANCE_RIP_AND_FINISH();
3370 IEM_MC_END();
3371 }
3372 return VINF_SUCCESS;
3373}
3374
3375/**
3376 * @opcode 0x6f
3377 * @oppfx 0xf3
3378 * @opcpuid avx
3379 * @opgroup og_avx_simdint_datamove
3380 * @opxcpttype 4UA
3381 * @optest op1=1 op2=2 -> op1=2
3382 * @optest op1=0 op2=-42 -> op1=-42
3383 */
3384FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx)
3385{
3386 IEMOP_MNEMONIC2(VEX_RM, VMOVDQU, vmovdqu, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
3387 Assert(pVCpu->iem.s.uVexLength <= 1);
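    /* Unlike vmovdqa there is no alignment restriction here; the plain memory
       fetches below perform unaligned accesses. */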
3388 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3389 if (IEM_IS_MODRM_REG_MODE(bRm))
3390 {
3391 /*
3392 * Register, register.
3393 */
3394 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3395 IEM_MC_BEGIN(0, 0);
3396
3397 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3398 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3399 if (pVCpu->iem.s.uVexLength == 0)
3400 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3401 IEM_GET_MODRM_RM(pVCpu, bRm));
3402 else
3403 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3404 IEM_GET_MODRM_RM(pVCpu, bRm));
3405 IEM_MC_ADVANCE_RIP_AND_FINISH();
3406 IEM_MC_END();
3407 }
3408 else if (pVCpu->iem.s.uVexLength == 0)
3409 {
3410 /*
3411 * Register, memory128.
3412 */
3413 IEM_MC_BEGIN(0, 2);
3414 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3415 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3416
3417 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3418 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3419 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3420 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3421
3422 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3423 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
3424
3425 IEM_MC_ADVANCE_RIP_AND_FINISH();
3426 IEM_MC_END();
3427 }
3428 else
3429 {
3430 /*
3431 * Register, memory256.
3432 */
3433 IEM_MC_BEGIN(0, 2);
3434 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
3435 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3436
3437 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3438 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3439 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3440 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3441
3442 IEM_MC_FETCH_MEM_U256(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3443 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
3444
3445 IEM_MC_ADVANCE_RIP_AND_FINISH();
3446 IEM_MC_END();
3447 }
3448 return VINF_SUCCESS;
3449}
3450
3451
3452/* Opcode VEX.0F 0x70 - invalid */
3453
3454
3455/**
3456 * Common worker for AVX/AVX2 instructions on the forms:
3457 * - vpxxx xmm0, xmm2/mem128, imm8
3458 * - vpxxx ymm0, ymm2/mem256, imm8
3459 *
3460 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
3461 */
3462FNIEMOP_DEF_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnU128, PFNIEMAIMPLMEDIAPSHUFU256, pfnU256)
3463{
3464 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3465 if (IEM_IS_MODRM_REG_MODE(bRm))
3466 {
3467 /*
3468 * Register, register.
3469 */
3470 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3471 if (pVCpu->iem.s.uVexLength)
3472 {
3473 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
3474 IEM_MC_BEGIN(3, 2);
3475 IEM_MC_LOCAL(RTUINT256U, uDst);
3476 IEM_MC_LOCAL(RTUINT256U, uSrc);
3477 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
3478 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3479 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3480 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
3481 IEM_MC_PREPARE_AVX_USAGE();
3482 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3483 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bEvilArg);
3484 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3485 IEM_MC_ADVANCE_RIP_AND_FINISH();
3486 IEM_MC_END();
3487 }
3488 else
3489 {
3490 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3491 IEM_MC_BEGIN(3, 0);
3492 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3493 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
3494 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3495 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
3496 IEM_MC_PREPARE_AVX_USAGE();
3497 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3498 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3499 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bEvilArg);
3500 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
3501 IEM_MC_ADVANCE_RIP_AND_FINISH();
3502 IEM_MC_END();
3503 }
3504 }
3505 else
3506 {
3507 /*
3508 * Register, memory.
3509 */
3510 if (pVCpu->iem.s.uVexLength)
3511 {
3512 IEM_MC_BEGIN(3, 3);
3513 IEM_MC_LOCAL(RTUINT256U, uDst);
3514 IEM_MC_LOCAL(RTUINT256U, uSrc);
3515 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3516 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
3517 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3518
3519 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
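            /* The imm8 follows the ModR/M, SIB and displacement bytes, so it can
               only be fetched once the effective address has been decoded. */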
3520 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3521 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
3522 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3523 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
3524 IEM_MC_PREPARE_AVX_USAGE();
3525
3526 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3527 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bEvilArg);
3528 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3529
3530 IEM_MC_ADVANCE_RIP_AND_FINISH();
3531 IEM_MC_END();
3532 }
3533 else
3534 {
3535            IEM_MC_BEGIN(3, 2);
3536 IEM_MC_LOCAL(RTUINT128U, uSrc);
3537 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3538 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3539 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
3540
3541 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3542 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3543 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3544 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3545 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
3546 IEM_MC_PREPARE_AVX_USAGE();
3547
3548 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3549 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3550 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bEvilArg);
3551 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
3552
3553 IEM_MC_ADVANCE_RIP_AND_FINISH();
3554 IEM_MC_END();
3555 }
3556 }
3557 return VINF_SUCCESS;
3558}
3559
3560
3561/** Opcode VEX.66.0F 0x70 - vpshufd Vx, Wx, Ib */
3562FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
3563{
3564 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFD, vpshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
3565 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128,
3566 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufd_u256, iemAImpl_vpshufd_u256_fallback));
3568}
3569
3570
3571/** Opcode VEX.F3.0F 0x70 - vpshufhw Vx, Wx, Ib */
3572FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
3573{
3574 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFHW, vpshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
3575 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128,
3576 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufhw_u256, iemAImpl_vpshufhw_u256_fallback));
3578}
3579
3580
3581/** Opcode VEX.F2.0F 0x70 - vpshuflw Vx, Wx, Ib */
3582FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
3583{
3584 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFLW, vpshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
3585 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128,
3586 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshuflw_u256, iemAImpl_vpshuflw_u256_fallback));
3587}
3588
3589
3590/* Opcode VEX.0F 0x71 11/2 - invalid. */
3591/** Opcode VEX.66.0F 0x71 11/2. */
3592FNIEMOP_STUB_1(iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, uint8_t, bRm);
3593
3594/* Opcode VEX.0F 0x71 11/4 - invalid */
3595/** Opcode VEX.66.0F 0x71 11/4. */
3596FNIEMOP_STUB_1(iemOp_VGrp12_vpsraw_Hx_Ux_Ib, uint8_t, bRm);
3597
3598/* Opcode VEX.0F 0x71 11/6 - invalid */
3599/** Opcode VEX.66.0F 0x71 11/6. */
3600FNIEMOP_STUB_1(iemOp_VGrp12_vpsllw_Hx_Ux_Ib, uint8_t, bRm);
3601
3602
3603/**
3604 * VEX Group 12 jump table for register variant.
3605 */
3606IEM_STATIC const PFNIEMOPRM g_apfnVexGroup12RegReg[] =
3607{
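/* pfx: none, 066h, 0f3h, 0f2h */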
3608 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3609 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3610 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3611 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3612 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsraw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3613 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3614 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsllw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3615 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3616};
3617AssertCompile(RT_ELEMENTS(g_apfnVexGroup12RegReg) == 8*4);
3618
3619
3620/** Opcode VEX.0F 0x71. */
3621FNIEMOP_DEF(iemOp_VGrp12)
3622{
3623 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3624 if (IEM_IS_MODRM_REG_MODE(bRm))
3625 /* register, register */
3626 return FNIEMOP_CALL_1(g_apfnVexGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
3627 + pVCpu->iem.s.idxPrefix], bRm);
3628 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3629}
3630
3631
3632/* Opcode VEX.0F 0x72 11/2 - invalid. */
3633/** Opcode VEX.66.0F 0x72 11/2. */
3634FNIEMOP_STUB_1(iemOp_VGrp13_vpsrld_Hx_Ux_Ib, uint8_t, bRm);
3635
3636/* Opcode VEX.0F 0x72 11/4 - invalid. */
3637/** Opcode VEX.66.0F 0x72 11/4. */
3638FNIEMOP_STUB_1(iemOp_VGrp13_vpsrad_Hx_Ux_Ib, uint8_t, bRm);
3639
3640/* Opcode VEX.0F 0x72 11/6 - invalid. */
3641/** Opcode VEX.66.0F 0x72 11/6. */
3642FNIEMOP_STUB_1(iemOp_VGrp13_vpslld_Hx_Ux_Ib, uint8_t, bRm);
3643
3644
3645/**
3646 * VEX Group 13 jump table for register variant.
3647 */
3648IEM_STATIC const PFNIEMOPRM g_apfnVexGroup13RegReg[] =
3649{
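/* pfx: none, 066h, 0f3h, 0f2h */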
3650 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3651 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3652 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3653 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3654 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrad_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3655 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3656 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpslld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3657 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3658};
3659AssertCompile(RT_ELEMENTS(g_apfnVexGroup13RegReg) == 8*4);
3660
3661/** Opcode VEX.0F 0x72. */
3662FNIEMOP_DEF(iemOp_VGrp13)
3663{
3664 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3665 if (IEM_IS_MODRM_REG_MODE(bRm))
3666 /* register, register */
3667 return FNIEMOP_CALL_1(g_apfnVexGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
3668 + pVCpu->iem.s.idxPrefix], bRm);
3669 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3670}
3671
3672
3673/* Opcode VEX.0F 0x73 11/2 - invalid. */
3674/** Opcode VEX.66.0F 0x73 11/2. */
3675FNIEMOP_STUB_1(iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, uint8_t, bRm);
3676
3677/** Opcode VEX.66.0F 0x73 11/3. */
3678FNIEMOP_STUB_1(iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, uint8_t, bRm);
3679
3680/* Opcode VEX.0F 0x73 11/6 - invalid. */
3681/** Opcode VEX.66.0F 0x73 11/6. */
3682FNIEMOP_STUB_1(iemOp_VGrp14_vpsllq_Hx_Ux_Ib, uint8_t, bRm);
3683
3684/** Opcode VEX.66.0F 0x73 11/7. */
3685FNIEMOP_STUB_1(iemOp_VGrp14_vpslldq_Hx_Ux_Ib, uint8_t, bRm);
3686
3687/**
3688 * VEX Group 14 jump table for register variant.
3689 */
3690IEM_STATIC const PFNIEMOPRM g_apfnVexGroup14RegReg[] =
3691{
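/* pfx: none, 066h, 0f3h, 0f2h */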
3692 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3693 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3694 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3695 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3696 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3697 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3698 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsllq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3699 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpslldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3700};
3701AssertCompile(RT_ELEMENTS(g_apfnVexGroup14RegReg) == 8*4);
3702
3703
3704/** Opcode VEX.0F 0x73. */
3705FNIEMOP_DEF(iemOp_VGrp14)
3706{
3707 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3708 if (IEM_IS_MODRM_REG_MODE(bRm))
3709 /* register, register */
3710 return FNIEMOP_CALL_1(g_apfnVexGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
3711 + pVCpu->iem.s.idxPrefix], bRm);
3712 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3713}
3714
3715
3716/* Opcode VEX.0F 0x74 - invalid */
3717
3718
3719/** Opcode VEX.66.0F 0x74 - vpcmpeqb Vx, Hx, Wx */
3720FNIEMOP_DEF(iemOp_vpcmpeqb_Vx_Hx_Wx)
3721{
3722 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQB, vpcmpeqb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3723 IEMOPMEDIAF3_INIT_VARS( vpcmpeqb);
3724 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3725}
3726
3727/* Opcode VEX.F3.0F 0x74 - invalid */
3728/* Opcode VEX.F2.0F 0x74 - invalid */
3729
3730
3731/* Opcode VEX.0F 0x75 - invalid */
3732
3733
3734/** Opcode VEX.66.0F 0x75 - vpcmpeqw Vx, Hx, Wx */
3735FNIEMOP_DEF(iemOp_vpcmpeqw_Vx_Hx_Wx)
3736{
3737 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQW, vpcmpeqw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3738 IEMOPMEDIAF3_INIT_VARS( vpcmpeqw);
3739 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3740}
3741
3742
3743/* Opcode VEX.F3.0F 0x75 - invalid */
3744/* Opcode VEX.F2.0F 0x75 - invalid */
3745
3746
3747/* Opcode VEX.0F 0x76 - invalid */
3748
3749
3750/** Opcode VEX.66.0F 0x76 - vpcmpeqd Vx, Hx, Wx */
3751FNIEMOP_DEF(iemOp_vpcmpeqd_Vx_Hx_Wx)
3752{
3753 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQD, vpcmpeqd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3754 IEMOPMEDIAF3_INIT_VARS( vpcmpeqd);
3755 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3756}
3757
3758
3759/* Opcode VEX.F3.0F 0x76 - invalid */
3760/* Opcode VEX.F2.0F 0x76 - invalid */
3761
3762
3763/** Opcode VEX.0F 0x77 - vzeroupperv vzeroallv */
3764FNIEMOP_DEF(iemOp_vzeroupperv__vzeroallv)
3765{
3766 Assert(pVCpu->iem.s.uVexLength <= 1);
3767 if (pVCpu->iem.s.uVexLength == 0)
3768 {
3769 /*
3770 * 128-bit: vzeroupper
3771 */
3772 IEMOP_MNEMONIC(vzeroupper, "vzeroupper");
3773 IEM_MC_BEGIN(0, 0);
3774
3775 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3776 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3777 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3778
3779 IEM_MC_CLEAR_YREG_128_UP(0);
3780 IEM_MC_CLEAR_YREG_128_UP(1);
3781 IEM_MC_CLEAR_YREG_128_UP(2);
3782 IEM_MC_CLEAR_YREG_128_UP(3);
3783 IEM_MC_CLEAR_YREG_128_UP(4);
3784 IEM_MC_CLEAR_YREG_128_UP(5);
3785 IEM_MC_CLEAR_YREG_128_UP(6);
3786 IEM_MC_CLEAR_YREG_128_UP(7);
3787
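/* Only YMM0 thru YMM7 are architecturally accessible outside 64-bit mode. */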
3788 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
3789 {
3790 IEM_MC_CLEAR_YREG_128_UP( 8);
3791 IEM_MC_CLEAR_YREG_128_UP( 9);
3792 IEM_MC_CLEAR_YREG_128_UP(10);
3793 IEM_MC_CLEAR_YREG_128_UP(11);
3794 IEM_MC_CLEAR_YREG_128_UP(12);
3795 IEM_MC_CLEAR_YREG_128_UP(13);
3796 IEM_MC_CLEAR_YREG_128_UP(14);
3797 IEM_MC_CLEAR_YREG_128_UP(15);
3798 }
3799
3800 IEM_MC_ADVANCE_RIP_AND_FINISH();
3801 IEM_MC_END();
3802 }
3803 else
3804 {
3805 /*
3806 * 256-bit: vzeroall
3807 */
3808 IEMOP_MNEMONIC(vzeroall, "vzeroall");
3809 IEM_MC_BEGIN(0, 1);
3810 IEM_MC_LOCAL(uint32_t, uZero);
3811
3812 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3813 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3814 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3815
3816 IEM_MC_ASSIGN(uZero, 0);
3817 IEM_MC_STORE_YREG_U32_ZX_VLMAX(0, uZero);
3818 IEM_MC_STORE_YREG_U32_ZX_VLMAX(1, uZero);
3819 IEM_MC_STORE_YREG_U32_ZX_VLMAX(2, uZero);
3820 IEM_MC_STORE_YREG_U32_ZX_VLMAX(3, uZero);
3821 IEM_MC_STORE_YREG_U32_ZX_VLMAX(4, uZero);
3822 IEM_MC_STORE_YREG_U32_ZX_VLMAX(5, uZero);
3823 IEM_MC_STORE_YREG_U32_ZX_VLMAX(6, uZero);
3824 IEM_MC_STORE_YREG_U32_ZX_VLMAX(7, uZero);
3825
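/* Ditto: YMM8 thru YMM15 only exist in 64-bit mode. */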
3826 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
3827 {
3828 IEM_MC_STORE_YREG_U32_ZX_VLMAX( 8, uZero);
3829 IEM_MC_STORE_YREG_U32_ZX_VLMAX( 9, uZero);
3830 IEM_MC_STORE_YREG_U32_ZX_VLMAX(10, uZero);
3831 IEM_MC_STORE_YREG_U32_ZX_VLMAX(11, uZero);
3832 IEM_MC_STORE_YREG_U32_ZX_VLMAX(12, uZero);
3833 IEM_MC_STORE_YREG_U32_ZX_VLMAX(13, uZero);
3834 IEM_MC_STORE_YREG_U32_ZX_VLMAX(14, uZero);
3835 IEM_MC_STORE_YREG_U32_ZX_VLMAX(15, uZero);
3836 }
3837
3838 IEM_MC_ADVANCE_RIP_AND_FINISH();
3839 IEM_MC_END();
3840 }
3841 return VINF_SUCCESS;
3842}
3843
3844
3845/* Opcode VEX.66.0F 0x77 - invalid */
3846/* Opcode VEX.F3.0F 0x77 - invalid */
3847/* Opcode VEX.F2.0F 0x77 - invalid */
3848
3849/* Opcode VEX.0F 0x78 - invalid */
3850/* Opcode VEX.66.0F 0x78 - invalid */
3851/* Opcode VEX.F3.0F 0x78 - invalid */
3852/* Opcode VEX.F2.0F 0x78 - invalid */
3853
3854/* Opcode VEX.0F 0x79 - invalid */
3855/* Opcode VEX.66.0F 0x79 - invalid */
3856/* Opcode VEX.F3.0F 0x79 - invalid */
3857/* Opcode VEX.F2.0F 0x79 - invalid */
3858
3859/* Opcode VEX.0F 0x7a - invalid */
3860/* Opcode VEX.66.0F 0x7a - invalid */
3861/* Opcode VEX.F3.0F 0x7a - invalid */
3862/* Opcode VEX.F2.0F 0x7a - invalid */
3863
3864/* Opcode VEX.0F 0x7b - invalid */
3865/* Opcode VEX.66.0F 0x7b - invalid */
3866/* Opcode VEX.F3.0F 0x7b - invalid */
3867/* Opcode VEX.F2.0F 0x7b - invalid */
3868
3869/* Opcode VEX.0F 0x7c - invalid */
3870/** Opcode VEX.66.0F 0x7c - vhaddpd Vpd, Hpd, Wpd */
3871FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd);
3872/* Opcode VEX.F3.0F 0x7c - invalid */
3873/** Opcode VEX.F2.0F 0x7c - vhaddps Vps, Hps, Wps */
3874FNIEMOP_STUB(iemOp_vhaddps_Vps_Hps_Wps);
3875
3876/* Opcode VEX.0F 0x7d - invalid */
3877/** Opcode VEX.66.0F 0x7d - vhsubpd Vpd, Hpd, Wpd */
3878FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd);
3879/* Opcode VEX.F3.0F 0x7d - invalid */
3880/** Opcode VEX.F2.0F 0x7d - vhsubps Vps, Hps, Wps */
3881FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps);
3882
3883
3884/* Opcode VEX.0F 0x7e - invalid */
3885
3886FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy)
3887{
3888 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
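/* VEX.W, recorded as the REX.W prefix flag by the VEX decoder, selects vmovq (64-bit) over vmovd (32-bit). */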
3889 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3890 {
3891 /**
3892 * @opcode 0x7e
3893 * @opcodesub rex.w=1
3894 * @oppfx 0x66
3895 * @opcpuid avx
3896 * @opgroup og_avx_simdint_datamov
3897 * @opxcpttype 5
3898 * @optest 64-bit / op1=1 op2=2 -> op1=2
3899 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
3900 */
3901 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
3902 if (IEM_IS_MODRM_REG_MODE(bRm))
3903 {
3904 /* greg64, XMM */
3905 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3906 IEM_MC_BEGIN(0, 1);
3907 IEM_MC_LOCAL(uint64_t, u64Tmp);
3908
3909 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3910 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3911
3912 IEM_MC_FETCH_YREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
3913 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
3914
3915 IEM_MC_ADVANCE_RIP_AND_FINISH();
3916 IEM_MC_END();
3917 }
3918 else
3919 {
3920 /* [mem64], XMM */
3921 IEM_MC_BEGIN(0, 2);
3922 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3923 IEM_MC_LOCAL(uint64_t, u64Tmp);
3924
3925 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3926 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3927 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3928 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3929
3930 IEM_MC_FETCH_YREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
3931 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3932
3933 IEM_MC_ADVANCE_RIP_AND_FINISH();
3934 IEM_MC_END();
3935 }
3936 }
3937 else
3938 {
3939 /**
3940 * @opdone
3941 * @opcode 0x7e
3942 * @opcodesub rex.w=0
3943 * @oppfx 0x66
3944 * @opcpuid avx
3945 * @opgroup og_avx_simdint_datamov
3946 * @opxcpttype 5
3947 * @opfunction iemOp_vmovd_q_Ey_Vy
3948 * @optest op1=1 op2=2 -> op1=2
3949 * @optest op1=0 op2=-42 -> op1=-42
3950 */
3951 IEMOP_MNEMONIC2(VEX_MR, VMOVD, vmovd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
3952 if (IEM_IS_MODRM_REG_MODE(bRm))
3953 {
3954 /* greg32, XMM */
3955 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3956 IEM_MC_BEGIN(0, 1);
3957 IEM_MC_LOCAL(uint32_t, u32Tmp);
3958
3959 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3960 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3961
3962 IEM_MC_FETCH_YREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
3963 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
3964
3965 IEM_MC_ADVANCE_RIP_AND_FINISH();
3966 IEM_MC_END();
3967 }
3968 else
3969 {
3970 /* [mem32], XMM */
3971 IEM_MC_BEGIN(0, 2);
3972 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3973 IEM_MC_LOCAL(uint32_t, u32Tmp);
3974
3975 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3976 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3977 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3978 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3979
3980 IEM_MC_FETCH_YREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
3981 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3982
3983 IEM_MC_ADVANCE_RIP_AND_FINISH();
3984 IEM_MC_END();
3985 }
3986 }
3987 return VINF_SUCCESS;
3988}
3989
3990/**
3991 * @opcode 0x7e
3992 * @oppfx 0xf3
3993 * @opcpuid avx
3994 * @opgroup og_avx_pcksclr_datamove
3995 * @opxcpttype none
3996 * @optest op1=1 op2=2 -> op1=2
3997 * @optest op1=0 op2=-42 -> op1=-42
3998 */
3999FNIEMOP_DEF(iemOp_vmovq_Vq_Wq)
4000{
4001 IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
4002 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4003 if (IEM_IS_MODRM_REG_MODE(bRm))
4004 {
4005 /*
4006 * Register, register.
4007 */
4008 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
4009 IEM_MC_BEGIN(0, 0);
4010
4011 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4012 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4013
4014 IEM_MC_COPY_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
4015 IEM_GET_MODRM_RM(pVCpu, bRm));
4016 IEM_MC_ADVANCE_RIP_AND_FINISH();
4017 IEM_MC_END();
4018 }
4019 else
4020 {
4021 /*
4022 * Memory, register.
4023 */
4024 IEM_MC_BEGIN(0, 2);
4025 IEM_MC_LOCAL(uint64_t, uSrc);
4026 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4027
4028 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4029 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
4030 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4031 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4032
4033 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4034 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
4035
4036 IEM_MC_ADVANCE_RIP_AND_FINISH();
4037 IEM_MC_END();
4038 }
4039 return VINF_SUCCESS;
4040}
4041
4042/* Opcode VEX.F2.0F 0x7e - invalid */
4043
4044
4045/* Opcode VEX.0F 0x7f - invalid */
4046
4047/**
4048 * @opcode 0x7f
4049 * @oppfx 0x66
4050 * @opcpuid avx
4051 * @opgroup og_avx_simdint_datamove
4052 * @opxcpttype 1
4053 * @optest op1=1 op2=2 -> op1=2
4054 * @optest op1=0 op2=-42 -> op1=-42
4055 */
4056FNIEMOP_DEF(iemOp_vmovdqa_Wx_Vx)
4057{
4058 IEMOP_MNEMONIC2(VEX_MR, VMOVDQA, vmovdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
4059 Assert(pVCpu->iem.s.uVexLength <= 1);
4060 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4061 if (IEM_IS_MODRM_REG_MODE(bRm))
4062 {
4063 /*
4064 * Register, register.
4065 */
4066 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
4067 IEM_MC_BEGIN(0, 0);
4068
4069 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4070 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4071 if (pVCpu->iem.s.uVexLength == 0)
4072 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4073 IEM_GET_MODRM_REG(pVCpu, bRm));
4074 else
4075 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4076 IEM_GET_MODRM_REG(pVCpu, bRm));
4077 IEM_MC_ADVANCE_RIP_AND_FINISH();
4078 IEM_MC_END();
4079 }
4080 else if (pVCpu->iem.s.uVexLength == 0)
4081 {
4082 /*
4083 * Register, memory128.
4084 */
4085 IEM_MC_BEGIN(0, 2);
4086 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4087 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4088
4089 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4090 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
4091 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4092 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4093
4094 IEM_MC_FETCH_YREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4095 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4096
4097 IEM_MC_ADVANCE_RIP_AND_FINISH();
4098 IEM_MC_END();
4099 }
4100 else
4101 {
4102 /*
4103 * Register, memory256.
4104 */
4105 IEM_MC_BEGIN(0, 2);
4106 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
4107 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4108
4109 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4110 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
4111 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4112 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4113
4114 IEM_MC_FETCH_YREG_U256(u256Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4115 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
4116
4117 IEM_MC_ADVANCE_RIP_AND_FINISH();
4118 IEM_MC_END();
4119 }
4120 return VINF_SUCCESS;
4121}
4122
4123/**
4124 * @opcode 0x7f
4125 * @oppfx 0xf3
4126 * @opcpuid avx
4127 * @opgroup og_avx_simdint_datamove
4128 * @opxcpttype 4UA
4129 * @optest op1=1 op2=2 -> op1=2
4130 * @optest op1=0 op2=-42 -> op1=-42
4131 */
4132FNIEMOP_DEF(iemOp_vmovdqu_Wx_Vx)
4133{
4134 IEMOP_MNEMONIC2(VEX_MR, VMOVDQU, vmovdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
4135 Assert(pVCpu->iem.s.uVexLength <= 1);
4136 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4137 if (IEM_IS_MODRM_REG_MODE(bRm))
4138 {
4139 /*
4140 * Register, register.
4141 */
4142 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
4143 IEM_MC_BEGIN(0, 0);
4144
4145 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4146 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4147 if (pVCpu->iem.s.uVexLength == 0)
4148 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4149 IEM_GET_MODRM_REG(pVCpu, bRm));
4150 else
4151 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4152 IEM_GET_MODRM_REG(pVCpu, bRm));
4153 IEM_MC_ADVANCE_RIP_AND_FINISH();
4154 IEM_MC_END();
4155 }
4156 else if (pVCpu->iem.s.uVexLength == 0)
4157 {
4158 /*
4159 * Register, memory128.
4160 */
4161 IEM_MC_BEGIN(0, 2);
4162 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4163 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4164
4165 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4166 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
4167 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4168 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4169
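/* Unlike vmovdqa above, the store is not alignment checked. */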
4170 IEM_MC_FETCH_YREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4171 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4172
4173 IEM_MC_ADVANCE_RIP_AND_FINISH();
4174 IEM_MC_END();
4175 }
4176 else
4177 {
4178 /*
4179 * Register, memory256.
4180 */
4181 IEM_MC_BEGIN(0, 2);
4182 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
4183 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4184
4185 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4186 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
4187 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4188 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4189
4190 IEM_MC_FETCH_YREG_U256(u256Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4191 IEM_MC_STORE_MEM_U256(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
4192
4193 IEM_MC_ADVANCE_RIP_AND_FINISH();
4194 IEM_MC_END();
4195 }
4196 return VINF_SUCCESS;
4197}
4198
4199/* Opcode VEX.F2.0F 0x7f - invalid */
4200
4201
4202/* Opcode VEX.0F 0x80 - invalid */
4203/* Opcode VEX.0F 0x81 - invalid */
4204/* Opcode VEX.0F 0x82 - invalid */
4205/* Opcode VEX.0F 0x83 - invalid */
4206/* Opcode VEX.0F 0x84 - invalid */
4207/* Opcode VEX.0F 0x85 - invalid */
4208/* Opcode VEX.0F 0x86 - invalid */
4209/* Opcode VEX.0F 0x87 - invalid */
4210/* Opcode VEX.0F 0x88 - invalid */
4211/* Opcode VEX.0F 0x89 - invalid */
4212/* Opcode VEX.0F 0x8a - invalid */
4213/* Opcode VEX.0F 0x8b - invalid */
4214/* Opcode VEX.0F 0x8c - invalid */
4215/* Opcode VEX.0F 0x8d - invalid */
4216/* Opcode VEX.0F 0x8e - invalid */
4217/* Opcode VEX.0F 0x8f - invalid */
4218/* Opcode VEX.0F 0x90 - invalid */
4219/* Opcode VEX.0F 0x91 - invalid */
4220/* Opcode VEX.0F 0x92 - invalid */
4221/* Opcode VEX.0F 0x93 - invalid */
4222/* Opcode VEX.0F 0x94 - invalid */
4223/* Opcode VEX.0F 0x95 - invalid */
4224/* Opcode VEX.0F 0x96 - invalid */
4225/* Opcode VEX.0F 0x97 - invalid */
4226/* Opcode VEX.0F 0x98 - invalid */
4227/* Opcode VEX.0F 0x99 - invalid */
4228/* Opcode VEX.0F 0x9a - invalid */
4229/* Opcode VEX.0F 0x9b - invalid */
4230/* Opcode VEX.0F 0x9c - invalid */
4231/* Opcode VEX.0F 0x9d - invalid */
4232/* Opcode VEX.0F 0x9e - invalid */
4233/* Opcode VEX.0F 0x9f - invalid */
4234/* Opcode VEX.0F 0xa0 - invalid */
4235/* Opcode VEX.0F 0xa1 - invalid */
4236/* Opcode VEX.0F 0xa2 - invalid */
4237/* Opcode VEX.0F 0xa3 - invalid */
4238/* Opcode VEX.0F 0xa4 - invalid */
4239/* Opcode VEX.0F 0xa5 - invalid */
4240/* Opcode VEX.0F 0xa6 - invalid */
4241/* Opcode VEX.0F 0xa7 - invalid */
4242/* Opcode VEX.0F 0xa8 - invalid */
4243/* Opcode VEX.0F 0xa9 - invalid */
4244/* Opcode VEX.0F 0xaa - invalid */
4245/* Opcode VEX.0F 0xab - invalid */
4246/* Opcode VEX.0F 0xac - invalid */
4247/* Opcode VEX.0F 0xad - invalid */
4248
4249
4250/* Opcode VEX.0F 0xae mem/0 - invalid. */
4251/* Opcode VEX.0F 0xae mem/1 - invalid. */
4252
4253/**
4254 * @ opmaps grp15
4255 * @ opcode !11/2
4256 * @ oppfx none
4257 * @ opcpuid sse
4258 * @ opgroup og_sse_mxcsrsm
4259 * @ opxcpttype 5
4260 * @ optest op1=0 -> mxcsr=0
4261 * @ optest op1=0x2083 -> mxcsr=0x2083
4262 * @ optest op1=0xfffffffe -> value.xcpt=0xd
4263 * @ optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
4264 * @ optest op1=0x2083 cr0|=em -> value.xcpt=0x6
4265 * @ optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
4266 * @ optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
4267 * @ optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
4268 * @ optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
4269 * @ optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
4270 * @ optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
4271 */
4272FNIEMOP_STUB_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm);
4273//FNIEMOP_DEF_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm)
4274//{
4275// IEMOP_MNEMONIC1(M_MEM, VLDMXCSR, vldmxcsr, MdRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4276// if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
4277// return IEMOP_RAISE_INVALID_OPCODE();
4278//
4279// IEM_MC_BEGIN(2, 0);
4280// IEM_MC_ARG(uint8_t, iEffSeg, 0);
4281// IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
4282// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
4283// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4284// IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4285// IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
4286// IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
4287// IEM_MC_END();
4288// return VINF_SUCCESS;
4289//}
4290
4291
4292/**
4293 * @opmaps vexgrp15
4294 * @opcode !11/3
4295 * @oppfx none
4296 * @opcpuid avx
4297 * @opgroup og_avx_mxcsrsm
4298 * @opxcpttype 5
4299 * @optest mxcsr=0 -> op1=0
4300 * @optest mxcsr=0x2083 -> op1=0x2083
4301 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
4302 * @optest !amd / mxcsr=0x2085 cr0|=em -> op1=0x2085
4303 * @optest amd / mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
4304 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
4305 * @optest mxcsr=0x2087 cr4&~=osfxsr -> op1=0x2087
4306 * @optest mxcsr=0x208f cr4&~=osxsave -> value.xcpt=0x6
4307 * @optest mxcsr=0x2087 cr4&~=osfxsr,osxsave -> value.xcpt=0x6
4308 * @optest !amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x7
4309 * @optest amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
4310 * @optest !amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> op1=0x2089
4311 * @optest amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
4312 * @optest !amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x7
4313 * @optest amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
4314 * @optest !amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x7
4315 * @optest amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
4316 * @optest !amd / mxcsr=0x208c xcr0&~=all_avx -> value.xcpt=0x6
4317 * @optest amd / mxcsr=0x208c xcr0&~=all_avx -> op1=0x208c
4318 * @optest !amd / mxcsr=0x208d xcr0&~=all_avx_sse -> value.xcpt=0x6
4319 * @optest amd / mxcsr=0x208d xcr0&~=all_avx_sse -> op1=0x208d
4320 * @optest !amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x6
4321 * @optest amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x7
4322 * @optest mxcsr=0x2082 cr0|=ts cr4&~=osxsave -> value.xcpt=0x6
4323 * @optest mxcsr=0x2081 xcr0&~=all_avx cr0|=ts cr4&~=osxsave
4324 * -> value.xcpt=0x6
4325 * @remarks AMD Jaguar CPU (f0x16,m0,s1) \#UD when CR0.EM is set. It also
4326 * doesn't seem to check XCR0[2:1] != 11b. This does not match the
4327 * APMv4 rev 3.17 page 509.
4328 * @todo Test this instruction on AMD Ryzen.
4329 */
4330FNIEMOP_DEF_1(iemOp_VGrp15_vstmxcsr, uint8_t, bRm)
4331{
4332 IEMOP_MNEMONIC1(VEX_M_MEM, VSTMXCSR, vstmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
4333 IEM_MC_BEGIN(2, 0);
4334 IEM_MC_ARG(uint8_t, iEffSeg, 0);
4335 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
4336 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
4337 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
4338 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4339 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
4340 IEM_MC_CALL_CIMPL_2(iemCImpl_vstmxcsr, iEffSeg, GCPtrEff);
4341 IEM_MC_END();
4342 return VINF_SUCCESS;
4343}
4344
4345/* Opcode VEX.0F 0xae mem/4 - invalid. */
4346/* Opcode VEX.0F 0xae mem/5 - invalid. */
4347/* Opcode VEX.0F 0xae mem/6 - invalid. */
4348/* Opcode VEX.0F 0xae mem/7 - invalid. */
4349
4350/* Opcode VEX.0F 0xae 11b/0 - invalid. */
4351/* Opcode VEX.0F 0xae 11b/1 - invalid. */
4352/* Opcode VEX.0F 0xae 11b/2 - invalid. */
4353/* Opcode VEX.0F 0xae 11b/3 - invalid. */
4354/* Opcode VEX.0F 0xae 11b/4 - invalid. */
4355/* Opcode VEX.0F 0xae 11b/5 - invalid. */
4356/* Opcode VEX.0F 0xae 11b/6 - invalid. */
4357/* Opcode VEX.0F 0xae 11b/7 - invalid. */
4358
4359/**
4360 * VEX Group 15 jump table for memory variant.
4361 */
4362IEM_STATIC const PFNIEMOPRM g_apfnVexGroup15MemReg[] =
4363{ /* pfx: none, 066h, 0f3h, 0f2h */
4364 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4365 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4366 /* /2 */ iemOp_VGrp15_vldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4367 /* /3 */ iemOp_VGrp15_vstmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4368 /* /4 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4369 /* /5 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4370 /* /6 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4371 /* /7 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4372};
4373AssertCompile(RT_ELEMENTS(g_apfnVexGroup15MemReg) == 8*4);
4374
4375
4376/** Opcode VEX.0F 0xae. */
4377FNIEMOP_DEF(iemOp_VGrp15)
4378{
4379 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4380 if (IEM_IS_MODRM_REG_MODE(bRm))
4381 /* register, register */
4382 return FNIEMOP_CALL_1(iemOp_InvalidWithRM, bRm);
4383
4384 /* memory, register */
4385 return FNIEMOP_CALL_1(g_apfnVexGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
4386 + pVCpu->iem.s.idxPrefix], bRm);
4387}
4388
4389
4390/* Opcode VEX.0F 0xaf - invalid. */
4391
4392/* Opcode VEX.0F 0xb0 - invalid. */
4393/* Opcode VEX.0F 0xb1 - invalid. */
4394/* Opcode VEX.0F 0xb2 - invalid. */
4395/* Opcode VEX.0F 0xb2 - invalid. */
4397/* Opcode VEX.0F 0xb4 - invalid. */
4398/* Opcode VEX.0F 0xb5 - invalid. */
4399/* Opcode VEX.0F 0xb6 - invalid. */
4400/* Opcode VEX.0F 0xb7 - invalid. */
4401/* Opcode VEX.0F 0xb8 - invalid. */
4402/* Opcode VEX.0F 0xb9 - invalid. */
4403/* Opcode VEX.0F 0xba - invalid. */
4404/* Opcode VEX.0F 0xbb - invalid. */
4405/* Opcode VEX.0F 0xbc - invalid. */
4406/* Opcode VEX.0F 0xbd - invalid. */
4407/* Opcode VEX.0F 0xbe - invalid. */
4408/* Opcode VEX.0F 0xbf - invalid. */
4409
4410/* Opcode VEX.0F 0xc0 - invalid. */
4411/* Opcode VEX.66.0F 0xc0 - invalid. */
4412/* Opcode VEX.F3.0F 0xc0 - invalid. */
4413/* Opcode VEX.F2.0F 0xc0 - invalid. */
4414
4415/* Opcode VEX.0F 0xc1 - invalid. */
4416/* Opcode VEX.66.0F 0xc1 - invalid. */
4417/* Opcode VEX.F3.0F 0xc1 - invalid. */
4418/* Opcode VEX.F2.0F 0xc1 - invalid. */
4419
4420/** Opcode VEX.0F 0xc2 - vcmpps Vps,Hps,Wps,Ib */
4421FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
4422/** Opcode VEX.66.0F 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
4423FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
4424/** Opcode VEX.F3.0F 0xc2 - vcmpss Vss,Hss,Wss,Ib */
4425FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
4426/** Opcode VEX.F2.0F 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
4427FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);
4428
4429/* Opcode VEX.0F 0xc3 - invalid */
4430/* Opcode VEX.66.0F 0xc3 - invalid */
4431/* Opcode VEX.F3.0F 0xc3 - invalid */
4432/* Opcode VEX.F2.0F 0xc3 - invalid */
4433
4434/* Opcode VEX.0F 0xc4 - invalid */
4435
4436
4437/** Opcode VEX.66.0F 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
4438FNIEMOP_DEF(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib)
4439{
4440 /*IEMOP_MNEMONIC4(VEX_RMV, VPINSRW, vpinsrw, Vdq, Vdq, Ey, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);*/ /** @todo */
4441 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4442 if (IEM_IS_MODRM_REG_MODE(bRm))
4443 {
4444 /*
4445 * Register, register.
4446 */
4447 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4448 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
4449 IEM_MC_BEGIN(4, 0);
4450 IEM_MC_ARG(PRTUINT128U, puDst, 0);
4451 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4452 IEM_MC_ARG(uint16_t, u16Src, 2);
4453 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 3);
4454 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4455 IEM_MC_PREPARE_AVX_USAGE();
4456 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4457 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
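/* The word source is the low 16 bits of the general purpose register encoded by ModR/M.rm. */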
4458 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4459 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vpinsrw_u128, iemAImpl_vpinsrw_u128_fallback),
4460 puDst, puSrc, u16Src, bEvilArg);
4461 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
4462 IEM_MC_ADVANCE_RIP_AND_FINISH();
4463 IEM_MC_END();
4464 }
4465 else
4466 {
4467 /*
4468 * Register, memory.
4469 */
4470 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4471 IEM_MC_BEGIN(4, 1);
4472 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4473 IEM_MC_ARG(PRTUINT128U, puDst, 0);
4474 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4475 IEM_MC_ARG(uint16_t, u16Src, 2);
4476 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 3);
4477
4478 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4479 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
4480 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4481 IEM_MC_PREPARE_AVX_USAGE();
4482
4483 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4484 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4485 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4486 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vpinsrw_u128, iemAImpl_vpinsrw_u128_fallback),
4487 puDst, puSrc, u16Src, bEvilArg);
4488 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
4489
4490 IEM_MC_ADVANCE_RIP_AND_FINISH();
4491 IEM_MC_END();
4492 }
4494 return VINF_SUCCESS;
4495}
4496
4497
4498/* Opcode VEX.F3.0F 0xc4 - invalid */
4499/* Opcode VEX.F2.0F 0xc4 - invalid */
4500
4501/* Opcode VEX.0F 0xc5 - invalid */
4502
4503
4504/** Opcode VEX.66.0F 0xc5 - vpextrw Gd, Udq, Ib */
4505FNIEMOP_DEF(iemOp_vpextrw_Gd_Udq_Ib)
4506{
4507 IEMOP_MNEMONIC3(VEX_RMI_REG, VPEXTRW, vpextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
4508 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4509 if (IEM_IS_MODRM_REG_MODE(bRm))
4510 {
4511 /*
4512 * Register, register.
4513 */
4514 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4515 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
4516 IEM_MC_BEGIN(3, 1);
4517 IEM_MC_LOCAL(uint16_t, u16Dst);
4518 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
4519 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4520 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
4521 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4522 IEM_MC_PREPARE_AVX_USAGE();
4523 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4524 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vpextrw_u128, iemAImpl_vpextrw_u128_fallback),
4525 pu16Dst, puSrc, bEvilArg);
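/* The 32-bit GPR store zero-extends the extracted word; in 64-bit mode it also clears bits 63:32. */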
4526 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
4527 IEM_MC_ADVANCE_RIP_AND_FINISH();
4528 IEM_MC_END();
4529 return VINF_SUCCESS;
4530 }
4531
4532 /* No memory operand; the memory form of vpextrw is encoded in the VEX.66.0F3A map (opcode 0x15). */
4533 return IEMOP_RAISE_INVALID_OPCODE();
4534}
4535
4536
4537/* Opcode VEX.F3.0F 0xc5 - invalid */
4538/* Opcode VEX.F2.0F 0xc5 - invalid */
4539
4540
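/** Common worker body for vshufps and vshufpd: decodes ModR/M, fetches the imm8 and dispatches on VEX.L; instantiated below. */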
4541#define VSHUFP_X(a_Instr) \
4542 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
4543 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4544 { \
4545 /* \
4546 * Register, register. \
4547 */ \
4548 if (pVCpu->iem.s.uVexLength) \
4549 { \
4550 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil); \
4551 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2); \
4552 IEM_MC_BEGIN(4, 3); \
4553 IEM_MC_LOCAL(RTUINT256U, uDst); \
4554 IEM_MC_LOCAL(RTUINT256U, uSrc1); \
4555 IEM_MC_LOCAL(RTUINT256U, uSrc2); \
4556 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
4557 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1); \
4558 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2); \
4559 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 3); \
4560 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT(); \
4561 IEM_MC_PREPARE_AVX_USAGE(); \
4562 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4563 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4564 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
4565 iemAImpl_ ## a_Instr ## _u256_fallback), puDst, puSrc1, puSrc2, bEvilArg); \
4566 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
4567 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4568 IEM_MC_END(); \
4569 } \
4570 else \
4571 { \
4572 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil); \
4573 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
4574 IEM_MC_BEGIN(4, 0); \
4575 IEM_MC_ARG(PRTUINT128U, puDst, 0); \
4576 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1); \
4577 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2); \
4578 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 3); \
4579 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT(); \
4580 IEM_MC_PREPARE_AVX_USAGE(); \
4581 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
4582 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4583 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4584 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
4585 iemAImpl_ ## a_Instr ## _u128_fallback), puDst, puSrc1, puSrc2, bEvilArg); \
4586 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
4587 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4588 IEM_MC_END(); \
4589 } \
4590 } \
4591 else \
4592 { \
4593 /* \
4594 * Register, memory. \
4595 */ \
4596 if (pVCpu->iem.s.uVexLength) \
4597 { \
4598 IEM_MC_BEGIN(4, 4); \
4599 IEM_MC_LOCAL(RTUINT256U, uDst); \
4600 IEM_MC_LOCAL(RTUINT256U, uSrc1); \
4601 IEM_MC_LOCAL(RTUINT256U, uSrc2); \
4602 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4603 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
4604 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1); \
4605 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2); \
4606 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
4607 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil); \
4608 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 3); \
4609 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2); \
4610 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT(); \
4611 IEM_MC_PREPARE_AVX_USAGE(); \
4612 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4613 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4614 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
4615 iemAImpl_ ## a_Instr ## _u256_fallback), puDst, puSrc1, puSrc2, bEvilArg); \
4616 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
4617 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4618 IEM_MC_END(); \
4619 } \
4620 else \
4621 { \
4622 IEM_MC_BEGIN(4, 2); \
4623 IEM_MC_LOCAL(RTUINT128U, uSrc2); \
4624 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4625 IEM_MC_ARG(PRTUINT128U, puDst, 0); \
4626 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1); \
4627 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2); \
4628 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
4629 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil); \
4630 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 3); \
4631 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
4632 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT(); \
4633 IEM_MC_PREPARE_AVX_USAGE(); \
4634 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4635 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
4636 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4637 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
4638 iemAImpl_ ## a_Instr ## _u128_fallback), puDst, puSrc1, puSrc2, bEvilArg); \
4639 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
4640 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4641 IEM_MC_END(); \
4642 } \
4643 } \
4644 return VINF_SUCCESS;
4645
4646/** Opcode VEX.0F 0xc6 - vshufps Vps,Hps,Wps,Ib */
4647FNIEMOP_DEF(iemOp_vshufps_Vps_Hps_Wps_Ib)
4648{
4649 IEMOP_MNEMONIC4(VEX_RMI, VSHUFPS, vshufps, Vps, Hps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_SKIP_PYTHON); /** @todo */
4650 VSHUFP_X(vshufps);
4651}
4652
4653
4654/** Opcode VEX.66.0F 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
4655FNIEMOP_DEF(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib)
4656{
4657 IEMOP_MNEMONIC4(VEX_RMI, VSHUFPD, vshufpd, Vpd, Hpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_SKIP_PYTHON); /** @todo */
4658 VSHUFP_X(vshufpd);
4659}
4660#undef VSHUFP_X
4661
4662
4663/* Opcode VEX.F3.0F 0xc6 - invalid */
4664/* Opcode VEX.F2.0F 0xc6 - invalid */
4665
4666/* Opcode VEX.0F 0xc7 - invalid */
4667/* Opcode VEX.66.0F 0xc7 - invalid */
4668/* Opcode VEX.F3.0F 0xc7 - invalid */
4669/* Opcode VEX.F2.0F 0xc7 - invalid */
4670
4671/* Opcode VEX.0F 0xc8 - invalid */
4672/* Opcode VEX.0F 0xc9 - invalid */
4673/* Opcode VEX.0F 0xca - invalid */
4674/* Opcode VEX.0F 0xcb - invalid */
4675/* Opcode VEX.0F 0xcc - invalid */
4676/* Opcode VEX.0F 0xcd - invalid */
4677/* Opcode VEX.0F 0xce - invalid */
4678/* Opcode VEX.0F 0xcf - invalid */
4679
4680
4681/* Opcode VEX.0F 0xd0 - invalid */
4682/** Opcode VEX.66.0F 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
4683FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd);
4684/* Opcode VEX.F3.0F 0xd0 - invalid */
4685/** Opcode VEX.F2.0F 0xd0 - vaddsubps Vps, Hps, Wps */
4686FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps);
4687
4688/* Opcode VEX.0F 0xd1 - invalid */
4689/** Opcode VEX.66.0F 0xd1 - vpsrlw Vx, Hx, W */
4690FNIEMOP_STUB(iemOp_vpsrlw_Vx_Hx_W);
4691/* Opcode VEX.F3.0F 0xd1 - invalid */
4692/* Opcode VEX.F2.0F 0xd1 - invalid */
4693
4694/* Opcode VEX.0F 0xd2 - invalid */
4695/** Opcode VEX.66.0F 0xd2 - vpsrld Vx, Hx, Wx */
4696FNIEMOP_STUB(iemOp_vpsrld_Vx_Hx_Wx);
4697/* Opcode VEX.F3.0F 0xd2 - invalid */
4698/* Opcode VEX.F2.0F 0xd2 - invalid */
4699
4700/* Opcode VEX.0F 0xd3 - invalid */
4701/** Opcode VEX.66.0F 0xd3 - vpsrlq Vx, Hx, Wx */
4702FNIEMOP_STUB(iemOp_vpsrlq_Vx_Hx_Wx);
4703/* Opcode VEX.F3.0F 0xd3 - invalid */
4704/* Opcode VEX.F2.0F 0xd3 - invalid */
4705
4706/* Opcode VEX.0F 0xd4 - invalid */
4707
4708
4709/** Opcode VEX.66.0F 0xd4 - vpaddq Vx, Hx, Wx */
4710FNIEMOP_DEF(iemOp_vpaddq_Vx_Hx_Wx)
4711{
4712 IEMOP_MNEMONIC3(VEX_RVM, VPADDQ, vpaddq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4713 IEMOPMEDIAF3_INIT_VARS( vpaddq);
4714 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4715}
4716
4717
4718/* Opcode VEX.F3.0F 0xd4 - invalid */
4719/* Opcode VEX.F2.0F 0xd4 - invalid */
4720
4721/* Opcode VEX.0F 0xd5 - invalid */
4722
4723
4724/** Opcode VEX.66.0F 0xd5 - vpmullw Vx, Hx, Wx */
4725FNIEMOP_DEF(iemOp_vpmullw_Vx_Hx_Wx)
4726{
4727 IEMOP_MNEMONIC3(VEX_RVM, VPMULLW, vpmullw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4728 IEMOPMEDIAOPTF3_INIT_VARS(vpmullw);
4729 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4730}
4731
4732
4733/* Opcode VEX.F3.0F 0xd5 - invalid */
4734/* Opcode VEX.F2.0F 0xd5 - invalid */
4735
4736/* Opcode VEX.0F 0xd6 - invalid */
4737
4738/**
4739 * @opcode 0xd6
4740 * @oppfx 0x66
4741 * @opcpuid avx
4742 * @opgroup og_avx_pcksclr_datamove
4743 * @opxcpttype none
4744 * @optest op1=-1 op2=2 -> op1=2
4745 * @optest op1=0 op2=-42 -> op1=-42
4746 */
4747FNIEMOP_DEF(iemOp_vmovq_Wq_Vq)
4748{
4749 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Wq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
4750 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4751 if (IEM_IS_MODRM_REG_MODE(bRm))
4752 {
4753 /*
4754 * Register, register.
4755 */
4756 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
4757 IEM_MC_BEGIN(0, 0);
4758
4759 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4760 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4761
4762 IEM_MC_COPY_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4763 IEM_GET_MODRM_REG(pVCpu, bRm));
4764 IEM_MC_ADVANCE_RIP_AND_FINISH();
4765 IEM_MC_END();
4766 }
4767 else
4768 {
4769 /*
4770 * Memory, register.
4771 */
4772 IEM_MC_BEGIN(0, 2);
4773 IEM_MC_LOCAL(uint64_t, uSrc);
4774 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4775
4776 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4777 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
4778 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4779 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4780
4781 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
4782 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4783
4784 IEM_MC_ADVANCE_RIP_AND_FINISH();
4785 IEM_MC_END();
4786 }
4787 return VINF_SUCCESS;
4788}
4789
4790/* Opcode VEX.F3.0F 0xd6 - invalid */
4791/* Opcode VEX.F2.0F 0xd6 - invalid */
4792
4793
4794/* Opcode VEX.0F 0xd7 - invalid */
4795
4796/** Opcode VEX.66.0F 0xd7 - vpmovmskb Gd, Ux */
4797FNIEMOP_DEF(iemOp_vpmovmskb_Gd_Ux)
4798{
4799 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4800 /* Docs say register only. */
4801 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
4802 {
4803 /* Note! Taking the lazy approach here wrt the high 32 bits of the GREG: the mask fits in the low 32 bits, so the full 64-bit write matches the zero-extending semantics of a 32-bit GPR write. */
4804 IEMOP_MNEMONIC2(RM_REG, VPMOVMSKB, vpmovmskb, Gd, Ux, DISOPTYPE_SSE | DISOPTYPE_HARMLESS, 0);
4805 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
4806 if (pVCpu->iem.s.uVexLength)
4807 {
4808 IEM_MC_BEGIN(2, 1);
4809 IEM_MC_ARG(uint64_t *, puDst, 0);
4810 IEM_MC_LOCAL(RTUINT256U, uSrc);
4811 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
4812 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
4813 IEM_MC_PREPARE_AVX_USAGE();
4814 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4815 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4816 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpmovmskb_u256,
4817 iemAImpl_vpmovmskb_u256_fallback), puDst, puSrc);
4818 IEM_MC_ADVANCE_RIP_AND_FINISH();
4819 IEM_MC_END();
4820 }
4821 else
4822 {
4823 IEM_MC_BEGIN(2, 0);
4824 IEM_MC_ARG(uint64_t *, puDst, 0);
4825 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4826 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
4827 IEM_MC_PREPARE_AVX_USAGE();
4828 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4829 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4830 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
4831 IEM_MC_ADVANCE_RIP_AND_FINISH();
4832 IEM_MC_END();
4833 }
4834 return VINF_SUCCESS;
4835 }
4836 return IEMOP_RAISE_INVALID_OPCODE();
4837}
4838
4839
4840/* Opcode VEX.F3.0F 0xd7 - invalid */
4841/* Opcode VEX.F2.0F 0xd7 - invalid */
4842
4843
4844/* Opcode VEX.0F 0xd8 - invalid */
4845/** Opcode VEX.66.0F 0xd8 - vpsubusb Vx, Hx, W */
4846FNIEMOP_STUB(iemOp_vpsubusb_Vx_Hx_W);
4847/* Opcode VEX.F3.0F 0xd8 - invalid */
4848/* Opcode VEX.F2.0F 0xd8 - invalid */
4849
4850/* Opcode VEX.0F 0xd9 - invalid */
4851/** Opcode VEX.66.0F 0xd9 - vpsubusw Vx, Hx, Wx */
4852FNIEMOP_STUB(iemOp_vpsubusw_Vx_Hx_Wx);
4853/* Opcode VEX.F3.0F 0xd9 - invalid */
4854/* Opcode VEX.F2.0F 0xd9 - invalid */
4855
4856/* Opcode VEX.0F 0xda - invalid */
4857
4858
4859/** Opcode VEX.66.0F 0xda - vpminub Vx, Hx, Wx */
4860FNIEMOP_DEF(iemOp_vpminub_Vx_Hx_Wx)
4861{
4862 IEMOP_MNEMONIC3(VEX_RVM, VPMINUB, vpminub, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4863 IEMOPMEDIAF3_INIT_VARS(vpminub);
4864 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4865}
4866
4867
4868/* Opcode VEX.F3.0F 0xda - invalid */
4869/* Opcode VEX.F2.0F 0xda - invalid */
4870
4871/* Opcode VEX.0F 0xdb - invalid */
4872
4873
4874/** Opcode VEX.66.0F 0xdb - vpand Vx, Hx, Wx */
4875FNIEMOP_DEF(iemOp_vpand_Vx_Hx_Wx)
4876{
4877 IEMOP_MNEMONIC3(VEX_RVM, VPAND, vpand, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4878 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
4879 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
4880}
4881
4882
4883/* Opcode VEX.F3.0F 0xdb - invalid */
4884/* Opcode VEX.F2.0F 0xdb - invalid */
4885
4886/* Opcode VEX.0F 0xdc - invalid */
4887/** Opcode VEX.66.0F 0xdc - vpaddusb Vx, Hx, Wx */
4888FNIEMOP_STUB(iemOp_vpaddusb_Vx_Hx_Wx);
4889/* Opcode VEX.F3.0F 0xdc - invalid */
4890/* Opcode VEX.F2.0F 0xdc - invalid */
4891
4892/* Opcode VEX.0F 0xdd - invalid */
4893/** Opcode VEX.66.0F 0xdd - vpaddusw Vx, Hx, Wx */
4894FNIEMOP_STUB(iemOp_vpaddusw_Vx_Hx_Wx);
4895/* Opcode VEX.F3.0F 0xdd - invalid */
4896/* Opcode VEX.F2.0F 0xdd - invalid */
4897
4898/* Opcode VEX.0F 0xde - invalid */
4899
4900
4901/** Opcode VEX.66.0F 0xde - vpmaxub Vx, Hx, Wx */
4902FNIEMOP_DEF(iemOp_vpmaxub_Vx_Hx_Wx)
4903{
4904 IEMOP_MNEMONIC3(VEX_RVM, VPMAXUB, vpmaxub, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4905 IEMOPMEDIAF3_INIT_VARS(vpmaxub);
4906 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4907}
4908
4909
4910/* Opcode VEX.F3.0F 0xde - invalid */
4911/* Opcode VEX.F2.0F 0xde - invalid */
4912
4913/* Opcode VEX.0F 0xdf - invalid */
4914
4915
4916/** Opcode VEX.66.0F 0xdf - vpandn Vx, Hx, Wx */
4917FNIEMOP_DEF(iemOp_vpandn_Vx_Hx_Wx)
4918{
4919 IEMOP_MNEMONIC3(VEX_RVM, VPANDN, vpandn, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4920 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
4921 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
4922}
4923
4924
4925/* Opcode VEX.F3.0F 0xdf - invalid */
4926/* Opcode VEX.F2.0F 0xdf - invalid */
4927
4928/* Opcode VEX.0F 0xe0 - invalid */
4929
4930
4931/** Opcode VEX.66.0F 0xe0 - vpavgb Vx, Hx, Wx */
4932FNIEMOP_DEF(iemOp_vpavgb_Vx_Hx_Wx)
4933{
4934 IEMOP_MNEMONIC3(VEX_RVM, VPAVGB, vpavgb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
4935 IEMOPMEDIAOPTF3_INIT_VARS(vpavgb);
4936 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4937}
4938
4939
4940/* Opcode VEX.F3.0F 0xe0 - invalid */
4941/* Opcode VEX.F2.0F 0xe0 - invalid */
4942
4943/* Opcode VEX.0F 0xe1 - invalid */
4944/** Opcode VEX.66.0F 0xe1 - vpsraw Vx, Hx, W */
4945FNIEMOP_STUB(iemOp_vpsraw_Vx_Hx_W);
4946/* Opcode VEX.F3.0F 0xe1 - invalid */
4947/* Opcode VEX.F2.0F 0xe1 - invalid */
4948
4949/* Opcode VEX.0F 0xe2 - invalid */
4950/** Opcode VEX.66.0F 0xe2 - vpsrad Vx, Hx, Wx */
4951FNIEMOP_STUB(iemOp_vpsrad_Vx_Hx_Wx);
4952/* Opcode VEX.F3.0F 0xe2 - invalid */
4953/* Opcode VEX.F2.0F 0xe2 - invalid */
4954
4955/* Opcode VEX.0F 0xe3 - invalid */
4956
4957
4958/** Opcode VEX.66.0F 0xe3 - vpavgw Vx, Hx, Wx */
4959FNIEMOP_DEF(iemOp_vpavgw_Vx_Hx_Wx)
4960{
4961 IEMOP_MNEMONIC3(VEX_RVM, VPAVGW, vpavgw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
4962 IEMOPMEDIAOPTF3_INIT_VARS(vpavgw);
4963 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4964}
4965
4966
4967/* Opcode VEX.F3.0F 0xe3 - invalid */
4968/* Opcode VEX.F2.0F 0xe3 - invalid */
4969
4970/* Opcode VEX.0F 0xe4 - invalid */
4971
4972
4973/** Opcode VEX.66.0F 0xe4 - vpmulhuw Vx, Hx, Wx */
4974FNIEMOP_DEF(iemOp_vpmulhuw_Vx_Hx_Wx)
4975{
4976 IEMOP_MNEMONIC3(VEX_RVM, VPMULHUW, vpmulhuw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
4977 IEMOPMEDIAOPTF3_INIT_VARS(vpmulhuw);
4978 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4979}
4980
4981
4982/* Opcode VEX.F3.0F 0xe4 - invalid */
4983/* Opcode VEX.F2.0F 0xe4 - invalid */
4984
4985/* Opcode VEX.0F 0xe5 - invalid */
4986
4987
4988/** Opcode VEX.66.0F 0xe5 - vpmulhw Vx, Hx, Wx */
4989FNIEMOP_DEF(iemOp_vpmulhw_Vx_Hx_Wx)
4990{
4991 IEMOP_MNEMONIC3(VEX_RVM, VPMULHW, vpmulhw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
4992 IEMOPMEDIAOPTF3_INIT_VARS(vpmulhw);
4993 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4994}
4995
4996
4997/* Opcode VEX.F3.0F 0xe5 - invalid */
4998/* Opcode VEX.F2.0F 0xe5 - invalid */
4999
5000/* Opcode VEX.0F 0xe6 - invalid */
5001/** Opcode VEX.66.0F 0xe6 - vcvttpd2dq Vx, Wpd */
5002FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
5003/** Opcode VEX.F3.0F 0xe6 - vcvtdq2pd Vx, Wpd */
5004FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
5005/** Opcode VEX.F2.0F 0xe6 - vcvtpd2dq Vx, Wpd */
5006FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
5007
5008
5009/* Opcode VEX.0F 0xe7 - invalid */
5010
5011/**
5012 * @opcode 0xe7
5013 * @opcodesub !11 mr/reg
5014 * @oppfx 0x66
5015 * @opcpuid avx
5016 * @opgroup og_avx_cachect
5017 * @opxcpttype 1
5018 * @optest op1=-1 op2=2 -> op1=2
5019 * @optest op1=0 op2=-42 -> op1=-42
5020 */
5021FNIEMOP_DEF(iemOp_vmovntdq_Mx_Vx)
5022{
5023 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTDQ, vmovntdq, Mx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
5024 Assert(pVCpu->iem.s.uVexLength <= 1);
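/* Note: the non-temporal hint is not modelled here; the store still performs the regular alignment check. */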
5025 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5026 if (IEM_IS_MODRM_MEM_MODE(bRm))
5027 {
5028 if (pVCpu->iem.s.uVexLength == 0)
5029 {
5030 /*
5031 * 128-bit: Memory, register.
5032 */
5033 IEM_MC_BEGIN(0, 2);
5034 IEM_MC_LOCAL(RTUINT128U, uSrc);
5035 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5036
5037 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5038 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
5039 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5040 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5041
5042 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
5043 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
5044
5045 IEM_MC_ADVANCE_RIP_AND_FINISH();
5046 IEM_MC_END();
5047 }
5048 else
5049 {
5050 /*
5051 * 256-bit: Memory, register.
5052 */
5053 IEM_MC_BEGIN(0, 2);
5054 IEM_MC_LOCAL(RTUINT256U, uSrc);
5055 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5056
5057 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5058 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
5059 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5060 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5061
5062 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
5063 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
5064
5065 IEM_MC_ADVANCE_RIP_AND_FINISH();
5066 IEM_MC_END();
5067 }
5068 return VINF_SUCCESS;
5069 }
5070 /**
5071 * @opdone
5072 * @opmnemonic udvex660fe7reg
5073 * @opcode 0xe7
5074 * @opcodesub 11 mr/reg
5075 * @oppfx 0x66
5076 * @opunused immediate
5077 * @opcpuid avx
5078 * @optest ->
5079 */
5080 return IEMOP_RAISE_INVALID_OPCODE();
5081}
5082
5083/* Opcode VEX.F3.0F 0xe7 - invalid */
5084/* Opcode VEX.F2.0F 0xe7 - invalid */
5085
5086
5087/* Opcode VEX.0F 0xe8 - invalid */
5088/** Opcode VEX.66.0F 0xe8 - vpsubsb Vx, Hx, W */
5089FNIEMOP_STUB(iemOp_vpsubsb_Vx_Hx_W);
5090/* Opcode VEX.F3.0F 0xe8 - invalid */
5091/* Opcode VEX.F2.0F 0xe8 - invalid */
5092
5093/* Opcode VEX.0F 0xe9 - invalid */
5094/** Opcode VEX.66.0F 0xe9 - vpsubsw Vx, Hx, Wx */
5095FNIEMOP_STUB(iemOp_vpsubsw_Vx_Hx_Wx);
5096/* Opcode VEX.F3.0F 0xe9 - invalid */
5097/* Opcode VEX.F2.0F 0xe9 - invalid */
5098
5099/* Opcode VEX.0F 0xea - invalid */
5100
5101
5102/** Opcode VEX.66.0F 0xea - vpminsw Vx, Hx, Wx */
5103FNIEMOP_DEF(iemOp_vpminsw_Vx_Hx_Wx)
5104{
5105 IEMOP_MNEMONIC3(VEX_RVM, VPMINSW, vpminsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5106 IEMOPMEDIAF3_INIT_VARS(vpminsw);
5107 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5108}


/* Opcode VEX.F3.0F 0xea - invalid */
/* Opcode VEX.F2.0F 0xea - invalid */

/* Opcode VEX.0F 0xeb - invalid */


/** Opcode VEX.66.0F 0xeb - vpor Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpor_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPOR, vpor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
                          IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
}


/* Opcode VEX.F3.0F 0xeb - invalid */
/* Opcode VEX.F2.0F 0xeb - invalid */

/* Opcode VEX.0F 0xec - invalid */
/** Opcode VEX.66.0F 0xec - vpaddsb Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpaddsb_Vx_Hx_Wx);
/* Opcode VEX.F3.0F 0xec - invalid */
/* Opcode VEX.F2.0F 0xec - invalid */

/* Opcode VEX.0F 0xed - invalid */
/** Opcode VEX.66.0F 0xed - vpaddsw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpaddsw_Vx_Hx_Wx);
/* Opcode VEX.F3.0F 0xed - invalid */
/* Opcode VEX.F2.0F 0xed - invalid */

/* Opcode VEX.0F 0xee - invalid */


/** Opcode VEX.66.0F 0xee - vpmaxsw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpmaxsw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMAXSW, vpmaxsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS(vpmaxsw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F3.0F 0xee - invalid */
/* Opcode VEX.F2.0F 0xee - invalid */


/* Opcode VEX.0F 0xef - invalid */


/** Opcode VEX.66.0F 0xef - vpxor Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpxor_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPXOR, vpxor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
                          IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
}


/* Opcode VEX.F3.0F 0xef - invalid */
/* Opcode VEX.F2.0F 0xef - invalid */

/* Opcode VEX.0F 0xf0 - invalid */
/* Opcode VEX.66.0F 0xf0 - invalid */


/** Opcode VEX.F2.0F 0xf0 - vlddqu Vx, Mx */
FNIEMOP_DEF(iemOp_vlddqu_Vx_Mx)
{
    IEMOP_MNEMONIC2(VEX_RM_MEM, VLDDQU, vlddqu, Vx_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register - (not implemented, assuming it raises \#UD).
         */
        return IEMOP_RAISE_INVALID_OPCODE();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * Register, memory128.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, u128Tmp);
        IEM_MC_LOCAL(RTGCPTR,    GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory256.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT256U, u256Tmp);
        IEM_MC_LOCAL(RTGCPTR,    GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U256(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
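/*
 * Note: VLDDQU is an unaligned load, hence the plain IEM_MC_FETCH_MEM_U128/
 * _U256 fetches above without an alignment check (contrast the _ALIGN_AVX
 * store in vmovntdq earlier). Functionally it matches VMOVDQU; the cache-line
 * split optimization it hints at does not matter for emulation. VEX.L selects
 * the 128/256-bit form, and the destination is zero-extended to the full
 * register width by the _ZX_VLMAX stores.
 */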


/* Opcode VEX.0F 0xf1 - invalid */
/** Opcode VEX.66.0F 0xf1 - vpsllw Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsllw_Vx_Hx_W);
/* Opcode VEX.F2.0F 0xf1 - invalid */

/* Opcode VEX.0F 0xf2 - invalid */
/** Opcode VEX.66.0F 0xf2 - vpslld Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpslld_Vx_Hx_Wx);
/* Opcode VEX.F2.0F 0xf2 - invalid */

/* Opcode VEX.0F 0xf3 - invalid */
/** Opcode VEX.66.0F 0xf3 - vpsllq Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpsllq_Vx_Hx_Wx);
/* Opcode VEX.F2.0F 0xf3 - invalid */

/* Opcode VEX.0F 0xf4 - invalid */


/** Opcode VEX.66.0F 0xf4 - vpmuludq Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpmuludq_Vx_Hx_W)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPMULUDQ, vpmuludq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpmuludq);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F2.0F 0xf4 - invalid */

/* Opcode VEX.0F 0xf5 - invalid */
/** Opcode VEX.66.0F 0xf5 - vpmaddwd Vx, Hx, Wx */
FNIEMOP_STUB(iemOp_vpmaddwd_Vx_Hx_Wx);
/* Opcode VEX.F2.0F 0xf5 - invalid */

/* Opcode VEX.0F 0xf6 - invalid */


/** Opcode VEX.66.0F 0xf6 - vpsadbw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpsadbw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSADBW, vpsadbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vpsadbw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F2.0F 0xf6 - invalid */

/* Opcode VEX.0F 0xf7 - invalid */
/** Opcode VEX.66.0F 0xf7 - vmaskmovdqu Vdq, Udq */
FNIEMOP_STUB(iemOp_vmaskmovdqu_Vdq_Udq);
/* Opcode VEX.F2.0F 0xf7 - invalid */

/* Opcode VEX.0F 0xf8 - invalid */


/** Opcode VEX.66.0F 0xf8 - vpsubb Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpsubb_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSUBB, vpsubb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS(vpsubb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F2.0F 0xf8 - invalid */

/* Opcode VEX.0F 0xf9 - invalid */


/** Opcode VEX.66.0F 0xf9 - vpsubw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpsubw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSUBW, vpsubw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS(vpsubw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F2.0F 0xf9 - invalid */

/* Opcode VEX.0F 0xfa - invalid */


/** Opcode VEX.66.0F 0xfa - vpsubd Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpsubd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSUBD, vpsubd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS(vpsubd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F2.0F 0xfa - invalid */

/* Opcode VEX.0F 0xfb - invalid */


/** Opcode VEX.66.0F 0xfb - vpsubq Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpsubq_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPSUBQ, vpsubq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS(vpsubq);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F2.0F 0xfb - invalid */

/* Opcode VEX.0F 0xfc - invalid */


/** Opcode VEX.66.0F 0xfc - vpaddb Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpaddb_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPADDB, vpaddb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS(vpaddb);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F2.0F 0xfc - invalid */

/* Opcode VEX.0F 0xfd - invalid */


/** Opcode VEX.66.0F 0xfd - vpaddw Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpaddw_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPADDW, vpaddw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS(vpaddw);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F2.0F 0xfd - invalid */

/* Opcode VEX.0F 0xfe - invalid */


/** Opcode VEX.66.0F 0xfe - vpaddd Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vpaddd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VPADDD, vpaddd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
    IEMOPMEDIAF3_INIT_VARS(vpaddd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F2.0F 0xfe - invalid */


/** Opcode **** 0x0f 0xff - UD0 */
FNIEMOP_DEF(iemOp_vud0)
{
    IEMOP_MNEMONIC(vud0, "vud0");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        RTGCPTR GCPtrEff;
        VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;
#endif
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
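/*
 * Note: UD0 decoding is deliberately vendor dependent here: Intel CPUs consume
 * a ModR/M byte (and any addressing bytes) before raising \#UD, while other
 * vendors do not, so the instruction length differs. That is presumably why
 * the effective address is calculated and then discarded above before the
 * invalid-opcode exception is raised.
 */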



/**
 * VEX opcode map \#1.
 *
 * @sa g_apfnTwoByteMap
 */
IEM_STATIC const PFNIEMOP g_apfnVexMap1[] =
{
    /*          no prefix,                  066h prefix                 f3h prefix,                 f2h prefix */
    /* 0x00 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x01 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x02 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x03 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x04 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x05 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x06 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x07 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x08 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x09 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0a */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0b */  IEMOP_X4(iemOp_vud2), /* ?? */
    /* 0x0c */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0d */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0e */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0f */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x10 */  iemOp_vmovups_Vps_Wps,      iemOp_vmovupd_Vpd_Wpd,      iemOp_vmovss_Vss_Hss_Wss,   iemOp_vmovsd_Vsd_Hsd_Wsd,
    /* 0x11 */  iemOp_vmovups_Wps_Vps,      iemOp_vmovupd_Wpd_Vpd,      iemOp_vmovss_Wss_Hss_Vss,   iemOp_vmovsd_Wsd_Hsd_Vsd,
    /* 0x12 */  iemOp_vmovlps_Vq_Hq_Mq__vmovhlps,  iemOp_vmovlpd_Vq_Hq_Mq,  iemOp_vmovsldup_Vx_Wx,  iemOp_vmovddup_Vx_Wx,
    /* 0x13 */  iemOp_vmovlps_Mq_Vq,        iemOp_vmovlpd_Mq_Vq,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x14 */  iemOp_vunpcklps_Vx_Hx_Wx,   iemOp_vunpcklpd_Vx_Hx_Wx,   iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x15 */  iemOp_vunpckhps_Vx_Hx_Wx,   iemOp_vunpckhpd_Vx_Hx_Wx,   iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x16 */  iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq,  iemOp_vmovhpd_Vdq_Hq_Mq,  iemOp_vmovshdup_Vx_Wx,  iemOp_InvalidNeedRM,
    /* 0x17 */  iemOp_vmovhps_Mq_Vq,        iemOp_vmovhpd_Mq_Vq,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x18 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x19 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1a */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1b */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1c */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1d */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1e */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1f */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x20 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x21 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x22 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x23 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x24 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x25 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x26 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x27 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x28 */  iemOp_vmovaps_Vps_Wps,      iemOp_vmovapd_Vpd_Wpd,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x29 */  iemOp_vmovaps_Wps_Vps,      iemOp_vmovapd_Wpd_Vpd,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x2a */  iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
    /* 0x2b */  iemOp_vmovntps_Mps_Vps,     iemOp_vmovntpd_Mpd_Vpd,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x2c */  iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_vcvttss2si_Gy_Wss,    iemOp_vcvttsd2si_Gy_Wsd,
    /* 0x2d */  iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_vcvtss2si_Gy_Wss,     iemOp_vcvtsd2si_Gy_Wsd,
    /* 0x2e */  iemOp_vucomiss_Vss_Wss,     iemOp_vucomisd_Vsd_Wsd,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x2f */  iemOp_vcomiss_Vss_Wss,      iemOp_vcomisd_Vsd_Wsd,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,

    /* 0x30 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x31 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x32 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x33 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x34 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x35 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x36 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x37 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x38 */  IEMOP_X4(iemOp_InvalidNeedRM),  /** @todo check that there is no escape table stuff here */
    /* 0x39 */  IEMOP_X4(iemOp_InvalidNeedRM),  /** @todo check that there is no escape table stuff here */
    /* 0x3a */  IEMOP_X4(iemOp_InvalidNeedRM),  /** @todo check that there is no escape table stuff here */
    /* 0x3b */  IEMOP_X4(iemOp_InvalidNeedRM),  /** @todo check that there is no escape table stuff here */
    /* 0x3c */  IEMOP_X4(iemOp_InvalidNeedRM),  /** @todo check that there is no escape table stuff here */
    /* 0x3d */  IEMOP_X4(iemOp_InvalidNeedRM),  /** @todo check that there is no escape table stuff here */
    /* 0x3e */  IEMOP_X4(iemOp_InvalidNeedRM),  /** @todo check that there is no escape table stuff here */
    /* 0x3f */  IEMOP_X4(iemOp_InvalidNeedRM),  /** @todo check that there is no escape table stuff here */

    /* 0x40 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x41 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x42 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x43 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x44 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x45 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x46 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x47 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x48 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x49 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4a */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4b */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4c */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4d */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4e */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4f */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x50 */  iemOp_vmovmskps_Gy_Ups,     iemOp_vmovmskpd_Gy_Upd,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x51 */  iemOp_vsqrtps_Vps_Wps,      iemOp_vsqrtpd_Vpd_Wpd,      iemOp_vsqrtss_Vss_Hss_Wss,  iemOp_vsqrtsd_Vsd_Hsd_Wsd,
    /* 0x52 */  iemOp_vrsqrtps_Vps_Wps,     iemOp_InvalidNeedRM,        iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
    /* 0x53 */  iemOp_vrcpps_Vps_Wps,       iemOp_InvalidNeedRM,        iemOp_vrcpss_Vss_Hss_Wss,   iemOp_InvalidNeedRM,
    /* 0x54 */  iemOp_vandps_Vps_Hps_Wps,   iemOp_vandpd_Vpd_Hpd_Wpd,   iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x55 */  iemOp_vandnps_Vps_Hps_Wps,  iemOp_vandnpd_Vpd_Hpd_Wpd,  iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x56 */  iemOp_vorps_Vps_Hps_Wps,    iemOp_vorpd_Vpd_Hpd_Wpd,    iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x57 */  iemOp_vxorps_Vps_Hps_Wps,   iemOp_vxorpd_Vpd_Hpd_Wpd,   iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x58 */  iemOp_vaddps_Vps_Hps_Wps,   iemOp_vaddpd_Vpd_Hpd_Wpd,   iemOp_vaddss_Vss_Hss_Wss,   iemOp_vaddsd_Vsd_Hsd_Wsd,
    /* 0x59 */  iemOp_vmulps_Vps_Hps_Wps,   iemOp_vmulpd_Vpd_Hpd_Wpd,   iemOp_vmulss_Vss_Hss_Wss,   iemOp_vmulsd_Vsd_Hsd_Wsd,
    /* 0x5a */  iemOp_vcvtps2pd_Vpd_Wps,    iemOp_vcvtpd2ps_Vps_Wpd,    iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
    /* 0x5b */  iemOp_vcvtdq2ps_Vps_Wdq,    iemOp_vcvtps2dq_Vdq_Wps,    iemOp_vcvttps2dq_Vdq_Wps,   iemOp_InvalidNeedRM,
    /* 0x5c */  iemOp_vsubps_Vps_Hps_Wps,   iemOp_vsubpd_Vpd_Hpd_Wpd,   iemOp_vsubss_Vss_Hss_Wss,   iemOp_vsubsd_Vsd_Hsd_Wsd,
    /* 0x5d */  iemOp_vminps_Vps_Hps_Wps,   iemOp_vminpd_Vpd_Hpd_Wpd,   iemOp_vminss_Vss_Hss_Wss,   iemOp_vminsd_Vsd_Hsd_Wsd,
    /* 0x5e */  iemOp_vdivps_Vps_Hps_Wps,   iemOp_vdivpd_Vpd_Hpd_Wpd,   iemOp_vdivss_Vss_Hss_Wss,   iemOp_vdivsd_Vsd_Hsd_Wsd,
    /* 0x5f */  iemOp_vmaxps_Vps_Hps_Wps,   iemOp_vmaxpd_Vpd_Hpd_Wpd,   iemOp_vmaxss_Vss_Hss_Wss,   iemOp_vmaxsd_Vsd_Hsd_Wsd,

    /* 0x60 */  iemOp_InvalidNeedRM,        iemOp_vpunpcklbw_Vx_Hx_Wx,  iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x61 */  iemOp_InvalidNeedRM,        iemOp_vpunpcklwd_Vx_Hx_Wx,  iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x62 */  iemOp_InvalidNeedRM,        iemOp_vpunpckldq_Vx_Hx_Wx,  iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x63 */  iemOp_InvalidNeedRM,        iemOp_vpacksswb_Vx_Hx_Wx,   iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x64 */  iemOp_InvalidNeedRM,        iemOp_vpcmpgtb_Vx_Hx_Wx,    iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x65 */  iemOp_InvalidNeedRM,        iemOp_vpcmpgtw_Vx_Hx_Wx,    iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x66 */  iemOp_InvalidNeedRM,        iemOp_vpcmpgtd_Vx_Hx_Wx,    iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x67 */  iemOp_InvalidNeedRM,        iemOp_vpackuswb_Vx_Hx_W,    iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x68 */  iemOp_InvalidNeedRM,        iemOp_vpunpckhbw_Vx_Hx_Wx,  iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x69 */  iemOp_InvalidNeedRM,        iemOp_vpunpckhwd_Vx_Hx_Wx,  iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x6a */  iemOp_InvalidNeedRM,        iemOp_vpunpckhdq_Vx_Hx_W,   iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x6b */  iemOp_InvalidNeedRM,        iemOp_vpackssdw_Vx_Hx_Wx,   iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x6c */  iemOp_InvalidNeedRM,        iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x6d */  iemOp_InvalidNeedRM,        iemOp_vpunpckhqdq_Vx_Hx_W,  iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x6e */  iemOp_InvalidNeedRM,        iemOp_vmovd_q_Vy_Ey,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x6f */  iemOp_InvalidNeedRM,        iemOp_vmovdqa_Vx_Wx,        iemOp_vmovdqu_Vx_Wx,        iemOp_InvalidNeedRM,

    /* 0x70 */  iemOp_InvalidNeedRM,        iemOp_vpshufd_Vx_Wx_Ib,     iemOp_vpshufhw_Vx_Wx_Ib,    iemOp_vpshuflw_Vx_Wx_Ib,
    /* 0x71 */  iemOp_InvalidNeedRM,        iemOp_VGrp12,               iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x72 */  iemOp_InvalidNeedRM,        iemOp_VGrp13,               iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x73 */  iemOp_InvalidNeedRM,        iemOp_VGrp14,               iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x74 */  iemOp_InvalidNeedRM,        iemOp_vpcmpeqb_Vx_Hx_Wx,    iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x75 */  iemOp_InvalidNeedRM,        iemOp_vpcmpeqw_Vx_Hx_Wx,    iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x76 */  iemOp_InvalidNeedRM,        iemOp_vpcmpeqd_Vx_Hx_Wx,    iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x77 */  iemOp_vzeroupperv__vzeroallv,  iemOp_InvalidNeedRM,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x78 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x79 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7a */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7b */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7c */  iemOp_InvalidNeedRM,        iemOp_vhaddpd_Vpd_Hpd_Wpd,  iemOp_InvalidNeedRM,        iemOp_vhaddps_Vps_Hps_Wps,
    /* 0x7d */  iemOp_InvalidNeedRM,        iemOp_vhsubpd_Vpd_Hpd_Wpd,  iemOp_InvalidNeedRM,        iemOp_vhsubps_Vps_Hps_Wps,
    /* 0x7e */  iemOp_InvalidNeedRM,        iemOp_vmovd_q_Ey_Vy,        iemOp_vmovq_Vq_Wq,          iemOp_InvalidNeedRM,
    /* 0x7f */  iemOp_InvalidNeedRM,        iemOp_vmovdqa_Wx_Vx,        iemOp_vmovdqu_Wx_Vx,        iemOp_InvalidNeedRM,

    /* 0x80 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x81 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x82 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x83 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x84 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x85 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x86 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x87 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x88 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x89 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8a */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8b */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8c */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8d */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8e */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8f */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x90 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x91 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x92 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x93 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x94 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x95 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x96 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x97 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x98 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x99 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9a */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9b */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9c */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9d */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9e */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9f */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xa0 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa1 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa2 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa3 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa4 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa5 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa6 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa9 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xaa */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xab */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xac */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xad */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xae */  IEMOP_X4(iemOp_VGrp15),
    /* 0xaf */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xb0 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb1 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb2 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb3 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb4 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb5 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb6 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb7 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb8 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb9 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xba */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbb */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbc */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbd */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbe */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbf */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xc0 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc1 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc2 */  iemOp_vcmpps_Vps_Hps_Wps_Ib,  iemOp_vcmppd_Vpd_Hpd_Wpd_Ib,  iemOp_vcmpss_Vss_Hss_Wss_Ib,  iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
    /* 0xc3 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc4 */  iemOp_InvalidNeedRM,        iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib,  iemOp_InvalidNeedRMImm8,  iemOp_InvalidNeedRMImm8,
    /* 0xc5 */  iemOp_InvalidNeedRM,        iemOp_vpextrw_Gd_Udq_Ib,    iemOp_InvalidNeedRMImm8,    iemOp_InvalidNeedRMImm8,
    /* 0xc6 */  iemOp_vshufps_Vps_Hps_Wps_Ib,  iemOp_vshufpd_Vpd_Hpd_Wpd_Ib,  iemOp_InvalidNeedRMImm8,  iemOp_InvalidNeedRMImm8,
    /* 0xc7 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc8 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc9 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xca */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcb */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcc */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcd */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xce */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcf */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xd0 */  iemOp_InvalidNeedRM,        iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM,       iemOp_vaddsubps_Vps_Hps_Wps,
    /* 0xd1 */  iemOp_InvalidNeedRM,        iemOp_vpsrlw_Vx_Hx_W,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd2 */  iemOp_InvalidNeedRM,        iemOp_vpsrld_Vx_Hx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd3 */  iemOp_InvalidNeedRM,        iemOp_vpsrlq_Vx_Hx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd4 */  iemOp_InvalidNeedRM,        iemOp_vpaddq_Vx_Hx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd5 */  iemOp_InvalidNeedRM,        iemOp_vpmullw_Vx_Hx_Wx,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd6 */  iemOp_InvalidNeedRM,        iemOp_vmovq_Wq_Vq,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd7 */  iemOp_InvalidNeedRM,        iemOp_vpmovmskb_Gd_Ux,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd8 */  iemOp_InvalidNeedRM,        iemOp_vpsubusb_Vx_Hx_W,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xd9 */  iemOp_InvalidNeedRM,        iemOp_vpsubusw_Vx_Hx_Wx,    iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xda */  iemOp_InvalidNeedRM,        iemOp_vpminub_Vx_Hx_Wx,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xdb */  iemOp_InvalidNeedRM,        iemOp_vpand_Vx_Hx_Wx,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xdc */  iemOp_InvalidNeedRM,        iemOp_vpaddusb_Vx_Hx_Wx,    iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xdd */  iemOp_InvalidNeedRM,        iemOp_vpaddusw_Vx_Hx_Wx,    iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xde */  iemOp_InvalidNeedRM,        iemOp_vpmaxub_Vx_Hx_Wx,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xdf */  iemOp_InvalidNeedRM,        iemOp_vpandn_Vx_Hx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,

    /* 0xe0 */  iemOp_InvalidNeedRM,        iemOp_vpavgb_Vx_Hx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe1 */  iemOp_InvalidNeedRM,        iemOp_vpsraw_Vx_Hx_W,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe2 */  iemOp_InvalidNeedRM,        iemOp_vpsrad_Vx_Hx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe3 */  iemOp_InvalidNeedRM,        iemOp_vpavgw_Vx_Hx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe4 */  iemOp_InvalidNeedRM,        iemOp_vpmulhuw_Vx_Hx_Wx,    iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe5 */  iemOp_InvalidNeedRM,        iemOp_vpmulhw_Vx_Hx_Wx,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe6 */  iemOp_InvalidNeedRM,        iemOp_vcvttpd2dq_Vx_Wpd,    iemOp_vcvtdq2pd_Vx_Wpd,     iemOp_vcvtpd2dq_Vx_Wpd,
    /* 0xe7 */  iemOp_InvalidNeedRM,        iemOp_vmovntdq_Mx_Vx,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe8 */  iemOp_InvalidNeedRM,        iemOp_vpsubsb_Vx_Hx_W,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xe9 */  iemOp_InvalidNeedRM,        iemOp_vpsubsw_Vx_Hx_Wx,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xea */  iemOp_InvalidNeedRM,        iemOp_vpminsw_Vx_Hx_Wx,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xeb */  iemOp_InvalidNeedRM,        iemOp_vpor_Vx_Hx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xec */  iemOp_InvalidNeedRM,        iemOp_vpaddsb_Vx_Hx_Wx,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xed */  iemOp_InvalidNeedRM,        iemOp_vpaddsw_Vx_Hx_Wx,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xee */  iemOp_InvalidNeedRM,        iemOp_vpmaxsw_Vx_Hx_Wx,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xef */  iemOp_InvalidNeedRM,        iemOp_vpxor_Vx_Hx_Wx,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,

    /* 0xf0 */  iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_vlddqu_Vx_Mx,
    /* 0xf1 */  iemOp_InvalidNeedRM,        iemOp_vpsllw_Vx_Hx_W,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf2 */  iemOp_InvalidNeedRM,        iemOp_vpslld_Vx_Hx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf3 */  iemOp_InvalidNeedRM,        iemOp_vpsllq_Vx_Hx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf4 */  iemOp_InvalidNeedRM,        iemOp_vpmuludq_Vx_Hx_W,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf5 */  iemOp_InvalidNeedRM,        iemOp_vpmaddwd_Vx_Hx_Wx,    iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf6 */  iemOp_InvalidNeedRM,        iemOp_vpsadbw_Vx_Hx_Wx,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf7 */  iemOp_InvalidNeedRM,        iemOp_vmaskmovdqu_Vdq_Udq,  iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf8 */  iemOp_InvalidNeedRM,        iemOp_vpsubb_Vx_Hx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xf9 */  iemOp_InvalidNeedRM,        iemOp_vpsubw_Vx_Hx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xfa */  iemOp_InvalidNeedRM,        iemOp_vpsubd_Vx_Hx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xfb */  iemOp_InvalidNeedRM,        iemOp_vpsubq_Vx_Hx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xfc */  iemOp_InvalidNeedRM,        iemOp_vpaddb_Vx_Hx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xfd */  iemOp_InvalidNeedRM,        iemOp_vpaddw_Vx_Hx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xfe */  iemOp_InvalidNeedRM,        iemOp_vpaddd_Vx_Hx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xff */  IEMOP_X4(iemOp_vud0) /* ?? */
};
AssertCompile(RT_ELEMENTS(g_apfnVexMap1) == 1024);
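/*
 * Note: 256 opcode bytes times the four prefix columns (none, 066h, 0f3h,
 * 0f2h) give the 1024 entries checked by the AssertCompile above; the decoder
 * presumably indexes the map as opcode * 4 + prefix column, the same scheme
 * as the legacy g_apfnTwoByteMap.
 */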
/** @} */
