VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsVexMap1.cpp.h@96854

Last change on this file since 96854 was 96751, checked in by vboxsync, 2 years ago

VMM/IEM: Implement [v]comiss/[v]ucomiss/[v]comisd/[v]ucomisd instructions, bugref:9898 [Use already existing IEM_MC_COMMIT_EFLAGS() instead of introducing another one]

/* $Id: IEMAllInstructionsVexMap1.cpp.h 96751 2022-09-15 18:14:30Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstructionsTwoByte0f.cpp.h is a legacy mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2022 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/** @name VEX Opcode Map 1
 * @{
 */

/**
 * Common worker for AVX2 instructions on the forms:
 *     - vpxxx    xmm0, xmm1, xmm2/mem128
 *     - vpxxx    ymm0, ymm1, ymm2/mem256
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, PCIEMOPMEDIAF3, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_BEGIN(4, 3);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 2);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 3);
            IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_BEGIN(4, 0);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG(PRTUINT128U, puDst, 1);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 2);
            IEM_MC_ARG(PCRTUINT128U, puSrc2, 3);
            IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(4, 4);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 2);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 3);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(4, 2);
            IEM_MC_LOCAL(RTUINT128U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG(PRTUINT128U, puDst, 1);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 2);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 3);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}

/**
 * Common worker for AVX2 instructions on the forms:
 *     - vpxxx    xmm0, xmm1, xmm2/mem128
 *     - vpxxx    ymm0, ymm1, ymm2/mem256
 *
 * Takes function table for function w/o implicit state parameter.
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, PCIEMOPMEDIAOPTF3, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_BEGIN(3, 3);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
            IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
            IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
            IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(3, 4);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(RTUINT128U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}

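/*
 * Illustrative sketch (not part of the original file): opcode handlers feed
 * the common workers above a function table plus a host/fallback selector.
 * The vunpcklps dispatcher further down in this file is the canonical shape:
 *
 *     FNIEMOP_DEF(iemOp_vunpcklps_Vx_Hx_Wx)
 *     {
 *         IEMOP_MNEMONIC3(VEX_RVM, VUNPCKLPS, vunpcklps, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
 *         IEMOPMEDIAOPTF3_INIT_VARS(vunpcklps);
 *         return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
 *     }
 *
 * IEMOPMEDIAOPTF3_INIT_VARS() is assumed to declare the s_Host/s_Fallback
 * tables whose pfnU128/pfnU256 members the worker ends up invoking.
 */
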
/**
 * Common worker for AVX2 instructions on the forms:
 *     - vpunpckhxx xmm0, xmm1, xmm2/mem128
 *     - vpunpckhxx ymm0, ymm1, ymm2/mem256
 *
 * The 128-bit memory version of this instruction may elect to skip fetching the
 * lower 64 bits of the operand. We, however, do not.
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, PCIEMOPMEDIAOPTF3, pImpl)
{
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, pImpl);
}


/**
 * Common worker for AVX2 instructions on the forms:
 *     - vpunpcklxx xmm0, xmm1, xmm2/mem128
 *     - vpunpcklxx ymm0, ymm1, ymm2/mem256
 *
 * The 128-bit memory version of this instruction may elect to skip fetching the
 * higher 64 bits of the operand. We, however, do not.
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, PCIEMOPMEDIAOPTF3, pImpl)
{
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, pImpl);
}

/**
 * Common worker for AVX2 instructions on the forms:
 *     - vpxxx    xmm0, xmm1/mem128
 *     - vpxxx    ymm0, ymm1/mem256
 *
 * Takes function table for function w/o implicit state parameter.
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Wx_Opt, PCIEMOPMEDIAOPTF2, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_BEGIN(2, 2);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
            IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
            IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(2, 3);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(2, 2);
            IEM_MC_LOCAL(RTUINT128U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}

/* Opcode VEX.0F 0x00 - invalid */
/* Opcode VEX.0F 0x01 - invalid */
/* Opcode VEX.0F 0x02 - invalid */
/* Opcode VEX.0F 0x03 - invalid */
/* Opcode VEX.0F 0x04 - invalid */
/* Opcode VEX.0F 0x05 - invalid */
/* Opcode VEX.0F 0x06 - invalid */
/* Opcode VEX.0F 0x07 - invalid */
/* Opcode VEX.0F 0x08 - invalid */
/* Opcode VEX.0F 0x09 - invalid */
/* Opcode VEX.0F 0x0a - invalid */

/** Opcode VEX.0F 0x0b. */
FNIEMOP_DEF(iemOp_vud2)
{
    IEMOP_MNEMONIC(vud2, "vud2");
    return IEMOP_RAISE_INVALID_OPCODE();
}

/* Opcode VEX.0F 0x0c - invalid */
/* Opcode VEX.0F 0x0d - invalid */
/* Opcode VEX.0F 0x0e - invalid */
/* Opcode VEX.0F 0x0f - invalid */

/**
 * @opcode 0x10
 * @oppfx none
 * @opcpuid avx
 * @opgroup og_avx_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_vmovups_Vps_Wps)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVUPS, vmovups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * 128-bit: Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * 256-bit: Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

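/*
 * Illustrative note (not part of the original file): the *_ZX_VLMAX micro-ops
 * above implement the VEX rule that a 128-bit write zeroes bits 255:128 of
 * the destination register, e.g. in guest code:
 *
 *     vmovups xmm1, xmm2      ; also clears ymm1[255:128]
 *     vmovups ymm1, [rax]     ; full 256-bit load, nothing left to clear
 */
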
/**
 * @opcode 0x10
 * @oppfx 0x66
 * @opcpuid avx
 * @opgroup og_avx_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_vmovupd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVUPD, vmovupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * 128-bit: Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * 256-bit: Register, memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

FNIEMOP_DEF(iemOp_vmovss_Vss_Hss_Wss)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x10
         * @oppfx 0xf3
         * @opcodesub 11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamerge
         * @opxcpttype 5
         * @optest op1=1 op2=0 op3=2 -> op1=2
         * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffea
         * @optest op1=3 op2=-1 op3=0x77 -> op1=-4294967177
         * @optest op1=3 op2=-2 op3=0x77 -> op1=-8589934473
         * @note HssHi refers to bits 127:32.
         */
        IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSS, vmovss, Vss_WO, HssHi, Uss, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEMOP_HLP_DONE_VEX_DECODING();
        IEM_MC_BEGIN(0, 0);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm) /*U32*/,
                                           IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x10
         * @oppfx 0xf3
         * @opcodesub !11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamove
         * @opxcpttype 5
         * @opfunction iemOp_vmovss_Vss_Hss_Wss
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-22 -> op1=-22
         */
        IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSS, vmovss, VssZx_WO, Md, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }

    return VINF_SUCCESS;
}

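/*
 * Illustrative note (not part of the original file): the two paths above match
 * the architected vmovss semantics:
 *
 *     vmovss xmm1, xmm2, xmm3   ; xmm1[31:0] = xmm3[31:0], xmm1[127:32] = xmm2[127:32]
 *     vmovss xmm1, [mem32]      ; xmm1[31:0] = mem32, rest of ymm1 zeroed
 */
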
FNIEMOP_DEF(iemOp_vmovsd_Vsd_Hsd_Wsd)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x10
         * @oppfx 0xf2
         * @opcodesub 11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamerge
         * @opxcpttype 5
         * @optest op1=1 op2=0 op3=2 -> op1=2
         * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
         * @optest op1=3 op2=-1 op3=0x77 ->
         *         op1=0xffffffffffffffff0000000000000077
         * @optest op1=3 op2=0x42 op3=0x77 -> op1=0x420000000000000077
         */
        IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSD, vmovsd, Vsd_WO, HsdHi, Usd, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEMOP_HLP_DONE_VEX_DECODING();
        IEM_MC_BEGIN(0, 0);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm) /*U64*/,
                                           IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x10
         * @oppfx 0xf2
         * @opcodesub !11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamove
         * @opxcpttype 5
         * @opfunction iemOp_vmovsd_Vsd_Hsd_Wsd
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-22 -> op1=-22
         */
        IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSD, vmovsd, VsdZx_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }

    return VINF_SUCCESS;
}

/**
 * @opcode 0x11
 * @oppfx none
 * @opcpuid avx
 * @opgroup og_avx_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
{
    IEMOP_MNEMONIC2(VEX_MR, VMOVUPS, vmovups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * 128-bit: Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * 256-bit: Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U256(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/**
 * @opcode 0x11
 * @oppfx 0x66
 * @opcpuid avx
 * @opgroup og_avx_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_vmovupd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(VEX_MR, VMOVUPD, vmovupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * 128-bit: Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * 256-bit: Memory, register.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U256(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

FNIEMOP_DEF(iemOp_vmovss_Wss_Hss_Vss)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x11
         * @oppfx 0xf3
         * @opcodesub 11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamerge
         * @opxcpttype 5
         * @optest op1=1 op2=0 op3=2 -> op1=2
         * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffea
         * @optest op1=3 op2=-1 op3=0x77 -> op1=-4294967177
         * @optest op1=3 op2=0x42 op3=0x77 -> op1=0x4200000077
         */
        IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSS, vmovss, Uss_WO, HssHi, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEMOP_HLP_DONE_VEX_DECODING();
        IEM_MC_BEGIN(0, 0);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm) /*U32*/,
                                           IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x11
         * @oppfx 0xf3
         * @opcodesub !11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamove
         * @opxcpttype 5
         * @opfunction iemOp_vmovss_Wss_Hss_Vss
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-22 -> op1=-22
         */
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSS, vmovss, Md_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }

    return VINF_SUCCESS;
}

FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hsd_Vsd)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x11
         * @oppfx 0xf2
         * @opcodesub 11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamerge
         * @opxcpttype 5
         * @optest op1=1 op2=0 op3=2 -> op1=2
         * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
         * @optest op1=3 op2=-1 op3=0x77 ->
         *         op1=0xffffffffffffffff0000000000000077
         * @optest op2=0x42 op3=0x77 -> op1=0x420000000000000077
         */
        IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSD, vmovsd, Usd_WO, HsdHi, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEMOP_HLP_DONE_VEX_DECODING();
        IEM_MC_BEGIN(0, 0);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm) /*U64*/,
                                           IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x11
         * @oppfx 0xf2
         * @opcodesub !11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamove
         * @opxcpttype 5
         * @opfunction iemOp_vmovsd_Wsd_Hsd_Vsd
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-22 -> op1=-22
         */
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSD, vmovsd, Mq_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }

    return VINF_SUCCESS;
}

FNIEMOP_DEF(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x12
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamerge
         * @opxcpttype 7LZ
         * @optest op2=0x2200220122022203
         *         op3=0x3304330533063307
         *         -> op1=0x22002201220222033304330533063307
         * @optest op2=-1 op3=-42 -> op1=-42
         * @note op3 and op2 are only the 8-byte high XMM register halves.
         */
        IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVHLPS, vmovhlps, Vq_WO, HqHi, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEMOP_HLP_DONE_VEX_DECODING_L0();
        IEM_MC_BEGIN(0, 0);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U64HI_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                               IEM_GET_MODRM_RM(pVCpu, bRm),
                                               IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x12
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamove
         * @opxcpttype 5LZ
         * @opfunction iemOp_vmovlps_Vq_Hq_Mq__vmovhlps
         * @optest op1=1 op2=0 op3=0 -> op1=0
         * @optest op1=0 op2=-1 op3=-1 -> op1=-1
         * @optest op1=1 op2=2 op3=3 -> op1=0x20000000000000003
         * @optest op2=-1 op3=0x42 -> op1=0xffffffffffffffff0000000000000042
         */
        IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPS, vmovlps, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_MERGE_YREG_U64LOCAL_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                                  uSrc,
                                                  IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

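/*
 * Illustrative note (not part of the original file): the two encodings above
 * in guest-assembly terms:
 *
 *     vmovhlps xmm1, xmm2, xmm3    ; xmm1[63:0] = xmm3[127:64], xmm1[127:64] = xmm2[127:64]
 *     vmovlps  xmm1, xmm2, [mem64] ; xmm1[63:0] = mem64,        xmm1[127:64] = xmm2[127:64]
 */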

/**
 * @opcode 0x12
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamerge
 * @opxcpttype 5LZ
 * @optest op2=0 op3=2 -> op1=2
 * @optest op2=0x22 op3=0x33 -> op1=0x220000000000000033
 * @optest op2=0xfffffff0fffffff1 op3=0xeeeeeee8eeeeeee9
 *         -> op1=0xfffffff0fffffff1eeeeeee8eeeeeee9
 */
FNIEMOP_DEF(iemOp_vmovlpd_Vq_Hq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPD, vmovlpd, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_MERGE_YREG_U64LOCAL_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                                  uSrc,
                                                  IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic udvex660f12m3
     * @opcode 0x12
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid avx
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}

/**
 * @opcode 0x12
 * @oppfx 0xf3
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamove
 * @opxcpttype 4
 * @optest vex.l==0 / op1=-1 op2=0xdddddddd00000002eeeeeeee00000001
 *         -> op1=0x00000002000000020000000100000001
 * @optest vex.l==1 /
 *         op2=0xbbbbbbbb00000004cccccccc00000003dddddddd00000002eeeeeeee00000001
 *         -> op1=0x0000000400000004000000030000000300000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_vmovsldup_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVSLDUP, vmovsldup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc, 1);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
            IEM_MC_ARG_CONST(uint8_t, iYRegSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 2);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovsldup_256_rr, iYRegDst, iYRegSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(2, 2);
            IEM_MC_LOCAL(RTUINT128U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movsldup, puDst, puSrc);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovsldup_256_rm, iYRegDst, puSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}

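/*
 * Illustrative note (not part of the original file): vmovsldup duplicates the
 * even-indexed dwords of the source, per 128-bit lane, matching the @optest
 * values above:
 *
 *     vmovsldup xmm1, xmm2   ; dwords: xmm1 = { s0, s0, s2, s2 }
 *     vmovsldup ymm1, ymm2   ; same pattern in both lanes
 */
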
/**
 * @opcode 0x12
 * @oppfx 0xf2
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamove
 * @opxcpttype 5
 * @optest vex.l==0 / op2=0xddddddddeeeeeeee2222222211111111
 *         -> op1=0x22222222111111112222222211111111
 * @optest vex.l==1 / op2=0xbbbbbbbbcccccccc4444444433333333ddddddddeeeeeeee2222222211111111
 *         -> op1=0x4444444433333333444444443333333322222222111111112222222211111111
 */
FNIEMOP_DEF(iemOp_vmovddup_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVDDUP, vmovddup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(uint64_t, uSrc, 1);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
            IEM_MC_ARG_CONST(uint8_t, iYRegSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 2);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovddup_256_rr, iYRegDst, iYRegSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(2, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(uint64_t, uSrc, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movddup, puDst, uSrc);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovddup_256_rm, iYRegDst, puSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x13
 * @opcodesub !11 mr/reg
 * @oppfx none
 * @opcpuid avx
 * @opgroup og_avx_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_vmovlps_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPS, vmovlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic udvex0f13m3
     * @opcode 0x13
     * @opcodesub 11 mr/reg
     * @oppfx none
     * @opunused immediate
     * @opcpuid avx
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}

/**
 * @opcode 0x13
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPD, vmovlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic udvex660f13m3
     * @opcode 0x13
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid avx
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}

/* Opcode VEX.F3.0F 0x13 - invalid */
/* Opcode VEX.F2.0F 0x13 - invalid */

/** Opcode VEX.0F 0x14 - vunpcklps Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vunpcklps_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VUNPCKLPS, vunpcklps, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vunpcklps);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F 0x14 - vunpcklpd Vx,Hx,Wx */
FNIEMOP_DEF(iemOp_vunpcklpd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VUNPCKLPD, vunpcklpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vunpcklpd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F3.0F 0x14 - invalid */
/* Opcode VEX.F2.0F 0x14 - invalid */


/** Opcode VEX.0F 0x15 - vunpckhps Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vunpckhps_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VUNPCKHPS, vunpckhps, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vunpckhps);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F 0x15 - vunpckhpd Vx,Hx,Wx */
FNIEMOP_DEF(iemOp_vunpckhpd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VUNPCKHPD, vunpckhpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vunpckhpd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F3.0F 0x15 - invalid */
/* Opcode VEX.F2.0F 0x15 - invalid */

FNIEMOP_DEF(iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x16
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamerge
         * @opxcpttype 7LZ
         */
        IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVLHPS, vmovlhps, Vq_WO, Hq, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEMOP_HLP_DONE_VEX_DECODING_L0();
        IEM_MC_BEGIN(0, 0);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U64LO_U64LO_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                               IEM_GET_MODRM_RM(pVCpu, bRm),
                                               IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x16
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamove
         * @opxcpttype 5LZ
         * @opfunction iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq
         */
        IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVHPS, vmovhps, Vq_WO, Hq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_MERGE_YREG_U64LO_U64LOCAL_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                                  IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/,
                                                  uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}

/**
 * @opcode 0x16
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamerge
 * @opxcpttype 5LZ
 */
FNIEMOP_DEF(iemOp_vmovhpd_Vdq_Hq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVHPD, vmovhpd, Vq_WO, Hq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_MERGE_YREG_U64LO_U64LOCAL_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                                  IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/,
                                                  uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic udvex660f16m3
     * @opcode 0x16
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid avx
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}

/** Opcode VEX.F3.0F 0x16 - vmovshdup Vx, Wx */
/**
 * @opcode 0x16
 * @oppfx 0xf3
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamove
 * @opxcpttype 4
 */
FNIEMOP_DEF(iemOp_vmovshdup_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVSHDUP, vmovshdup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc, 1);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
            IEM_MC_ARG_CONST(uint8_t, iYRegSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 2);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovshdup_256_rr, iYRegDst, iYRegSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(2, 2);
            IEM_MC_LOCAL(RTUINT128U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_movshdup, puDst, puSrc);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 2);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_IMPLICIT_AVX_AIMPL_ARGS();
            IEM_MC_ARG_CONST(uint8_t, iYRegDst, IEM_GET_MODRM_REG(pVCpu, bRm), 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_AVX_AIMPL_2(iemAImpl_vmovshdup_256_rm, iYRegDst, puSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}


/* Opcode VEX.F2.0F 0x16 - invalid */

/**
 * @opcode 0x17
 * @opcodesub !11 mr/reg
 * @oppfx none
 * @opcpuid avx
 * @opgroup og_avx_simdfp_datamove
 * @opxcpttype 5
 */
FNIEMOP_DEF(iemOp_vmovhps_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVHPS, vmovhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_2ND_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic udvex0f17m3
     * @opcode 0x17
     * @opcodesub 11 mr/reg
     * @oppfx none
     * @opunused immediate
     * @opcpuid avx
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}

/**
 * @opcode 0x17
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamove
 * @opxcpttype 5
 */
FNIEMOP_DEF(iemOp_vmovhpd_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVHPD, vmovhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_2ND_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    /**
     * @opdone
     * @opmnemonic udvex660f17m3
     * @opcode 0x17
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid avx
     * @optest ->
     */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/* Opcode VEX.F3.0F 0x17 - invalid */
/* Opcode VEX.F2.0F 0x17 - invalid */

/* Opcode VEX.0F 0x18 - invalid */
/* Opcode VEX.0F 0x19 - invalid */
/* Opcode VEX.0F 0x1a - invalid */
/* Opcode VEX.0F 0x1b - invalid */
/* Opcode VEX.0F 0x1c - invalid */
/* Opcode VEX.0F 0x1d - invalid */
/* Opcode VEX.0F 0x1e - invalid */
/* Opcode VEX.0F 0x1f - invalid */

/* Opcode VEX.0F 0x20 - invalid */
/* Opcode VEX.0F 0x21 - invalid */
/* Opcode VEX.0F 0x22 - invalid */
/* Opcode VEX.0F 0x23 - invalid */
/* Opcode VEX.0F 0x24 - invalid */
/* Opcode VEX.0F 0x25 - invalid */
/* Opcode VEX.0F 0x26 - invalid */
/* Opcode VEX.0F 0x27 - invalid */

/**
 * @opcode 0x28
 * @oppfx none
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamove
 * @opxcpttype 1
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 * @note Almost identical to vmovapd.
 */
FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVAPS, vmovaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
        IEM_MC_BEGIN(1, 0);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(RTUINT128U, uSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(RTUINT256U, uSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

            IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}

1864/**
1865 * @opcode 0x28
1866 * @oppfx 66
1867 * @opcpuid avx
1868 * @opgroup og_avx_pcksclr_datamove
1869 * @opxcpttype 1
1870 * @optest op1=1 op2=2 -> op1=2
1871 * @optest op1=0 op2=-42 -> op1=-42
 * @note Almost identical to vmovaps.
1873 */
1874FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
1875{
1876 IEMOP_MNEMONIC2(VEX_RM, VMOVAPD, vmovapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1877 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1878 Assert(pVCpu->iem.s.uVexLength <= 1);
1879 if (IEM_IS_MODRM_REG_MODE(bRm))
1880 {
1881 /*
1882 * Register, register.
1883 */
1884 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1885 IEM_MC_BEGIN(1, 0);
1886
1887 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1888 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1889 if (pVCpu->iem.s.uVexLength == 0)
1890 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1891 IEM_GET_MODRM_RM(pVCpu, bRm));
1892 else
1893 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1894 IEM_GET_MODRM_RM(pVCpu, bRm));
1895 IEM_MC_ADVANCE_RIP();
1896 IEM_MC_END();
1897 }
1898 else
1899 {
1900 /*
1901 * Register, memory.
1902 */
1903 if (pVCpu->iem.s.uVexLength == 0)
1904 {
1905 IEM_MC_BEGIN(0, 2);
1906 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1907 IEM_MC_LOCAL(RTUINT128U, uSrc);
1908
1909 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1910 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1911 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1912 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1913
1914 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1915 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1916
1917 IEM_MC_ADVANCE_RIP();
1918 IEM_MC_END();
1919 }
1920 else
1921 {
1922 IEM_MC_BEGIN(0, 2);
1923 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1924 IEM_MC_LOCAL(RTUINT256U, uSrc);
1925
1926 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1927 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1928 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1929 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1930
1931 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1932 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1933
1934 IEM_MC_ADVANCE_RIP();
1935 IEM_MC_END();
1936 }
1937 }
1938 return VINF_SUCCESS;
1939}
1940
1941/**
1942 * @opmnemonic udvexf30f28
1943 * @opcode 0x28
1944 * @oppfx 0xf3
1945 * @opunused vex.modrm
1946 * @opcpuid avx
1947 * @optest ->
1948 * @opdone
1949 */
1950
1951/**
1952 * @opmnemonic udvexf20f28
1953 * @opcode 0x28
1954 * @oppfx 0xf2
1955 * @opunused vex.modrm
1956 * @opcpuid avx
1957 * @optest ->
1958 * @opdone
1959 */
1960
1961/**
1962 * @opcode 0x29
1963 * @oppfx none
1964 * @opcpuid avx
1965 * @opgroup og_avx_pcksclr_datamove
1966 * @opxcpttype 1
1967 * @optest op1=1 op2=2 -> op1=2
1968 * @optest op1=0 op2=-42 -> op1=-42
1969 * @note Almost identical to vmovapd.
1970 */
1971FNIEMOP_DEF(iemOp_vmovaps_Wps_Vps)
1972{
1973 IEMOP_MNEMONIC2(VEX_MR, VMOVAPS, vmovaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1974 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1975 Assert(pVCpu->iem.s.uVexLength <= 1);
1976 if (IEM_IS_MODRM_REG_MODE(bRm))
1977 {
1978 /*
1979 * Register, register.
1980 */
1981 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
1982 IEM_MC_BEGIN(1, 0);
1983
1984 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1985 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1986 if (pVCpu->iem.s.uVexLength == 0)
1987 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
1988 IEM_GET_MODRM_REG(pVCpu, bRm));
1989 else
1990 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
1991 IEM_GET_MODRM_REG(pVCpu, bRm));
1992 IEM_MC_ADVANCE_RIP();
1993 IEM_MC_END();
1994 }
1995 else
1996 {
1997 /*
1998 * Register, memory.
1999 */
2000 if (pVCpu->iem.s.uVexLength == 0)
2001 {
2002 IEM_MC_BEGIN(0, 2);
2003 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2004 IEM_MC_LOCAL(RTUINT128U, uSrc);
2005
2006 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2007 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2008 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2009 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2010
2011 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2012 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2013
2014 IEM_MC_ADVANCE_RIP();
2015 IEM_MC_END();
2016 }
2017 else
2018 {
2019 IEM_MC_BEGIN(0, 2);
2020 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2021 IEM_MC_LOCAL(RTUINT256U, uSrc);
2022
2023 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2024 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2025 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2026 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2027
2028 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2029 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2030
2031 IEM_MC_ADVANCE_RIP();
2032 IEM_MC_END();
2033 }
2034 }
2035 return VINF_SUCCESS;
2036}
2037
2038/**
2039 * @opcode 0x29
2040 * @oppfx 66
2041 * @opcpuid avx
2042 * @opgroup og_avx_pcksclr_datamove
2043 * @opxcpttype 1
2044 * @optest op1=1 op2=2 -> op1=2
2045 * @optest op1=0 op2=-42 -> op1=-42
 * @note Almost identical to vmovaps.
2047 */
2048FNIEMOP_DEF(iemOp_vmovapd_Wpd_Vpd)
2049{
2050 IEMOP_MNEMONIC2(VEX_MR, VMOVAPD, vmovapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2051 Assert(pVCpu->iem.s.uVexLength <= 1);
2052 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2053 if (IEM_IS_MODRM_REG_MODE(bRm))
2054 {
2055 /*
2056 * Register, register.
2057 */
2058 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2059 IEM_MC_BEGIN(1, 0);
2060
2061 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2062 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2063 if (pVCpu->iem.s.uVexLength == 0)
2064 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2065 IEM_GET_MODRM_REG(pVCpu, bRm));
2066 else
2067 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2068 IEM_GET_MODRM_REG(pVCpu, bRm));
2069 IEM_MC_ADVANCE_RIP();
2070 IEM_MC_END();
2071 }
2072 else
2073 {
2074 /*
2075 * Register, memory.
2076 */
2077 if (pVCpu->iem.s.uVexLength == 0)
2078 {
2079 IEM_MC_BEGIN(0, 2);
2080 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2081 IEM_MC_LOCAL(RTUINT128U, uSrc);
2082
2083 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2084 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2085 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2086 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2087
2088 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2089 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2090
2091 IEM_MC_ADVANCE_RIP();
2092 IEM_MC_END();
2093 }
2094 else
2095 {
2096 IEM_MC_BEGIN(0, 2);
2097 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2098 IEM_MC_LOCAL(RTUINT256U, uSrc);
2099
2100 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2101 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2102 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2103 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2104
2105 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2106 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2107
2108 IEM_MC_ADVANCE_RIP();
2109 IEM_MC_END();
2110 }
2111 }
2112 return VINF_SUCCESS;
2113}
2114
2115
2116/**
2117 * @opmnemonic udvexf30f29
2118 * @opcode 0x29
2119 * @oppfx 0xf3
2120 * @opunused vex.modrm
2121 * @opcpuid avx
2122 * @optest ->
2123 * @opdone
2124 */
2125
2126/**
2127 * @opmnemonic udvexf20f29
2128 * @opcode 0x29
2129 * @oppfx 0xf2
2130 * @opunused vex.modrm
2131 * @opcpuid avx
2132 * @optest ->
2133 * @opdone
2134 */
2135
2136
2137/** Opcode VEX.0F 0x2a - invalid */
2138/** Opcode VEX.66.0F 0x2a - invalid */
2139/** Opcode VEX.F3.0F 0x2a - vcvtsi2ss Vss, Hss, Ey */
2140FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey);
2141/** Opcode VEX.F2.0F 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
2142FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey);
2143
2144
2145/**
2146 * @opcode 0x2b
2147 * @opcodesub !11 mr/reg
2148 * @oppfx none
2149 * @opcpuid avx
2150 * @opgroup og_avx_cachect
2151 * @opxcpttype 1
2152 * @optest op1=1 op2=2 -> op1=2
2153 * @optest op1=0 op2=-42 -> op1=-42
 * @note Identical implementation to vmovntpd.
2155 */
2156FNIEMOP_DEF(iemOp_vmovntps_Mps_Vps)
2157{
2158 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPS, vmovntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2159 Assert(pVCpu->iem.s.uVexLength <= 1);
2160 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2161 if (IEM_IS_MODRM_MEM_MODE(bRm))
2162 {
2163 /*
 * Memory, register.
2165 */
2166 if (pVCpu->iem.s.uVexLength == 0)
2167 {
2168 IEM_MC_BEGIN(0, 2);
2169 IEM_MC_LOCAL(RTUINT128U, uSrc);
2170 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2171
2172 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2173 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2174 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2175 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2176
2177 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2178 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2179
2180 IEM_MC_ADVANCE_RIP();
2181 IEM_MC_END();
2182 }
2183 else
2184 {
2185 IEM_MC_BEGIN(0, 2);
2186 IEM_MC_LOCAL(RTUINT256U, uSrc);
2187 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2188
2189 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2190 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2191 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2192 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2193
2194 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2195 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2196
2197 IEM_MC_ADVANCE_RIP();
2198 IEM_MC_END();
2199 }
2200 }
2201 /* The register, register encoding is invalid. */
2202 else
2203 return IEMOP_RAISE_INVALID_OPCODE();
2204 return VINF_SUCCESS;
2205}
2206
2207/**
2208 * @opcode 0x2b
2209 * @opcodesub !11 mr/reg
2210 * @oppfx 0x66
2211 * @opcpuid avx
2212 * @opgroup og_avx_cachect
2213 * @opxcpttype 1
2214 * @optest op1=1 op2=2 -> op1=2
2215 * @optest op1=0 op2=-42 -> op1=-42
 * @note Identical implementation to vmovntps.
2217 */
2218FNIEMOP_DEF(iemOp_vmovntpd_Mpd_Vpd)
2219{
2220 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPD, vmovntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2221 Assert(pVCpu->iem.s.uVexLength <= 1);
2222 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2223 if (IEM_IS_MODRM_MEM_MODE(bRm))
2224 {
2225 /*
 * Memory, register.
2227 */
2228 if (pVCpu->iem.s.uVexLength == 0)
2229 {
2230 IEM_MC_BEGIN(0, 2);
2231 IEM_MC_LOCAL(RTUINT128U, uSrc);
2232 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2233
2234 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2235 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2236 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2237 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2238
2239 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2240 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2241
2242 IEM_MC_ADVANCE_RIP();
2243 IEM_MC_END();
2244 }
2245 else
2246 {
2247 IEM_MC_BEGIN(0, 2);
2248 IEM_MC_LOCAL(RTUINT256U, uSrc);
2249 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2250
2251 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2252 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
2253 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2254 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2255
2256 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2257 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2258
2259 IEM_MC_ADVANCE_RIP();
2260 IEM_MC_END();
2261 }
2262 }
2263 /* The register, register encoding is invalid. */
2264 else
2265 return IEMOP_RAISE_INVALID_OPCODE();
2266 return VINF_SUCCESS;
2267}
2268
2269/**
2270 * @opmnemonic udvexf30f2b
2271 * @opcode 0x2b
2272 * @oppfx 0xf3
2273 * @opunused vex.modrm
2274 * @opcpuid avx
2275 * @optest ->
2276 * @opdone
2277 */
2278
2279/**
2280 * @opmnemonic udvexf20f2b
2281 * @opcode 0x2b
2282 * @oppfx 0xf2
2283 * @opunused vex.modrm
2284 * @opcpuid avx
2285 * @optest ->
2286 * @opdone
2287 */
2288
2289
2290/* Opcode VEX.0F 0x2c - invalid */
2291/* Opcode VEX.66.0F 0x2c - invalid */
2292/** Opcode VEX.F3.0F 0x2c - vcvttss2si Gy, Wss */
2293FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
2294/** Opcode VEX.F2.0F 0x2c - vcvttsd2si Gy, Wsd */
2295FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);
2296
2297/* Opcode VEX.0F 0x2d - invalid */
2298/* Opcode VEX.66.0F 0x2d - invalid */
2299/** Opcode VEX.F3.0F 0x2d - vcvtss2si Gy, Wss */
2300FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
2301/** Opcode VEX.F2.0F 0x2d - vcvtsd2si Gy, Wsd */
2302FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);
2303
2304
2305/** Opcode VEX.0F 0x2e - vucomiss Vss, Wss */
2306FNIEMOP_DEF(iemOp_vucomiss_Vss_Wss)
2307{
2308 IEMOP_MNEMONIC2(RM, VUCOMISS, vucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2309 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2310 if (IEM_IS_MODRM_REG_MODE(bRm))
2311 {
2312 /*
2313 * Register, register.
2314 */
2315 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2316 IEM_MC_BEGIN(4, 1);
2317 IEM_MC_LOCAL(uint32_t, fEFlags);
2318 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2319 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2320 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2321 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
2322 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2323 IEM_MC_PREPARE_AVX_USAGE();
2324 IEM_MC_FETCH_EFLAGS(fEFlags);
2325 IEM_MC_REF_MXCSR(pfMxcsr);
2326 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2327 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2328 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomiss_u128, iemAImpl_vucomiss_u128_fallback),
2329 pfMxcsr, pEFlags, puSrc1, puSrc2);
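        /* Commit the new flags only if the compare did not leave an unmasked
           SIMD FP exception pending in MXCSR; otherwise raise #XM (or #UD if
           unmasked SIMD exceptions are not enabled).  The memory form and the
           vcomiss, vcomisd and vucomisd variants below follow the same pattern. */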
2330 IEM_MC_IF_MXCSR_XCPT_PENDING()
2331 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2332 IEM_MC_ELSE()
2333 IEM_MC_COMMIT_EFLAGS(fEFlags);
2334 IEM_MC_ENDIF();
2335
2336 IEM_MC_ADVANCE_RIP();
2337 IEM_MC_END();
2338 }
2339 else
2340 {
2341 /*
2342 * Register, memory.
2343 */
2344 IEM_MC_BEGIN(4, 3);
2345 IEM_MC_LOCAL(uint32_t, fEFlags);
2346 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2347 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2348 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2349 IEM_MC_LOCAL(X86XMMREG, uSrc2);
2350 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
2351 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2352
2353 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2354 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2355 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2356 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2357
        IEM_MC_PREPARE_AVX_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_REF_MXCSR(pfMxcsr);
2360 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2361 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomiss_u128, iemAImpl_vucomiss_u128_fallback),
2362 pfMxcsr, pEFlags, puSrc1, puSrc2);
2363 IEM_MC_IF_MXCSR_XCPT_PENDING()
2364 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2365 IEM_MC_ELSE()
2366 IEM_MC_COMMIT_EFLAGS(fEFlags);
2367 IEM_MC_ENDIF();
2368
2369 IEM_MC_ADVANCE_RIP();
2370 IEM_MC_END();
2371 }
2372 return VINF_SUCCESS;
2373}
2374
2375
2376/** Opcode VEX.66.0F 0x2e - vucomisd Vsd, Wsd */
2377FNIEMOP_DEF(iemOp_vucomisd_Vsd_Wsd)
2378{
2379 IEMOP_MNEMONIC2(RM, VUCOMISD, vucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2380 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2381 if (IEM_IS_MODRM_REG_MODE(bRm))
2382 {
2383 /*
2384 * Register, register.
2385 */
2386 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2387 IEM_MC_BEGIN(4, 1);
2388 IEM_MC_LOCAL(uint32_t, fEFlags);
2389 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2390 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2391 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2392 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
2393 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2394 IEM_MC_PREPARE_AVX_USAGE();
2395 IEM_MC_FETCH_EFLAGS(fEFlags);
2396 IEM_MC_REF_MXCSR(pfMxcsr);
2397 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2398 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2399 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomisd_u128, iemAImpl_vucomisd_u128_fallback),
2400 pfMxcsr, pEFlags, puSrc1, puSrc2);
2401 IEM_MC_IF_MXCSR_XCPT_PENDING()
2402 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2403 IEM_MC_ELSE()
2404 IEM_MC_COMMIT_EFLAGS(fEFlags);
2405 IEM_MC_ENDIF();
2406
2407 IEM_MC_ADVANCE_RIP();
2408 IEM_MC_END();
2409 }
2410 else
2411 {
2412 /*
2413 * Register, memory.
2414 */
2415 IEM_MC_BEGIN(4, 3);
2416 IEM_MC_LOCAL(uint32_t, fEFlags);
2417 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2418 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2419 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2420 IEM_MC_LOCAL(X86XMMREG, uSrc2);
2421 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
2422 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2423
2424 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2425 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2426 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2427 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2428
        IEM_MC_PREPARE_AVX_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_REF_MXCSR(pfMxcsr);
2431 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2432 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomisd_u128, iemAImpl_vucomisd_u128_fallback),
2433 pfMxcsr, pEFlags, puSrc1, puSrc2);
2434 IEM_MC_IF_MXCSR_XCPT_PENDING()
2435 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2436 IEM_MC_ELSE()
2437 IEM_MC_COMMIT_EFLAGS(fEFlags);
2438 IEM_MC_ENDIF();
2439
2440 IEM_MC_ADVANCE_RIP();
2441 IEM_MC_END();
2442 }
2443 return VINF_SUCCESS;
2444}
2445
2446
2447/* Opcode VEX.F3.0F 0x2e - invalid */
2448/* Opcode VEX.F2.0F 0x2e - invalid */
2449
2450/** Opcode VEX.0F 0x2f - vcomiss Vss, Wss */
2451FNIEMOP_DEF(iemOp_vcomiss_Vss_Wss)
2452{
2453 IEMOP_MNEMONIC2(RM, VCOMISS, vcomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2454 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2455 if (IEM_IS_MODRM_REG_MODE(bRm))
2456 {
2457 /*
2458 * Register, register.
2459 */
2460 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2461 IEM_MC_BEGIN(4, 1);
2462 IEM_MC_LOCAL(uint32_t, fEFlags);
2463 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2464 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2465 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2466 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
2467 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2468 IEM_MC_PREPARE_AVX_USAGE();
2469 IEM_MC_FETCH_EFLAGS(fEFlags);
2470 IEM_MC_REF_MXCSR(pfMxcsr);
2471 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2472 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2473 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomiss_u128, iemAImpl_vcomiss_u128_fallback),
2474 pfMxcsr, pEFlags, puSrc1, puSrc2);
2475 IEM_MC_IF_MXCSR_XCPT_PENDING()
2476 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2477 IEM_MC_ELSE()
2478 IEM_MC_COMMIT_EFLAGS(fEFlags);
2479 IEM_MC_ENDIF();
2480
2481 IEM_MC_ADVANCE_RIP();
2482 IEM_MC_END();
2483 }
2484 else
2485 {
2486 /*
2487 * Register, memory.
2488 */
2489 IEM_MC_BEGIN(4, 3);
2490 IEM_MC_LOCAL(uint32_t, fEFlags);
2491 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2492 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2493 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2494 IEM_MC_LOCAL(X86XMMREG, uSrc2);
2495 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
2496 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2497
2498 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2499 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2500 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2501 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2502
        IEM_MC_PREPARE_AVX_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_REF_MXCSR(pfMxcsr);
2505 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2506 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomiss_u128, iemAImpl_vcomiss_u128_fallback),
2507 pfMxcsr, pEFlags, puSrc1, puSrc2);
2508 IEM_MC_IF_MXCSR_XCPT_PENDING()
2509 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2510 IEM_MC_ELSE()
2511 IEM_MC_COMMIT_EFLAGS(fEFlags);
2512 IEM_MC_ENDIF();
2513
2514 IEM_MC_ADVANCE_RIP();
2515 IEM_MC_END();
2516 }
2517 return VINF_SUCCESS;
2518}
2519
2520
2521/** Opcode VEX.66.0F 0x2f - vcomisd Vsd, Wsd */
2522FNIEMOP_DEF(iemOp_vcomisd_Vsd_Wsd)
2523{
2524 IEMOP_MNEMONIC2(RM, VCOMISD, vcomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2525 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2526 if (IEM_IS_MODRM_REG_MODE(bRm))
2527 {
2528 /*
2529 * Register, register.
2530 */
2531 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2532 IEM_MC_BEGIN(4, 1);
2533 IEM_MC_LOCAL(uint32_t, fEFlags);
2534 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2535 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2536 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2537 IEM_MC_ARG(PCX86XMMREG, puSrc2, 3);
2538 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2539 IEM_MC_PREPARE_AVX_USAGE();
2540 IEM_MC_FETCH_EFLAGS(fEFlags);
2541 IEM_MC_REF_MXCSR(pfMxcsr);
2542 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2543 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
2544 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomisd_u128, iemAImpl_vcomisd_u128_fallback),
2545 pfMxcsr, pEFlags, puSrc1, puSrc2);
2546 IEM_MC_IF_MXCSR_XCPT_PENDING()
2547 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2548 IEM_MC_ELSE()
2549 IEM_MC_COMMIT_EFLAGS(fEFlags);
2550 IEM_MC_ENDIF();
2551
2552 IEM_MC_ADVANCE_RIP();
2553 IEM_MC_END();
2554 }
2555 else
2556 {
2557 /*
2558 * Register, memory.
2559 */
2560 IEM_MC_BEGIN(4, 3);
2561 IEM_MC_LOCAL(uint32_t, fEFlags);
2562 IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
2563 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 1);
2564 IEM_MC_ARG(PCX86XMMREG, puSrc1, 2);
2565 IEM_MC_LOCAL(X86XMMREG, uSrc2);
2566 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 3);
2567 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2568
2569 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2570 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
2571 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2572 IEM_MC_FETCH_MEM_XMM_U32(uSrc2, 0 /*a_DWord*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2573
        IEM_MC_PREPARE_AVX_USAGE();
        IEM_MC_FETCH_EFLAGS(fEFlags);
        IEM_MC_REF_MXCSR(pfMxcsr);
2576 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
2577 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomisd_u128, iemAImpl_vcomisd_u128_fallback),
2578 pfMxcsr, pEFlags, puSrc1, puSrc2);
2579 IEM_MC_IF_MXCSR_XCPT_PENDING()
2580 IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2581 IEM_MC_ELSE()
2582 IEM_MC_COMMIT_EFLAGS(fEFlags);
2583 IEM_MC_ENDIF();
2584
2585 IEM_MC_ADVANCE_RIP();
2586 IEM_MC_END();
2587 }
2588 return VINF_SUCCESS;
2589}
2590
2591
2592/* Opcode VEX.F3.0F 0x2f - invalid */
2593/* Opcode VEX.F2.0F 0x2f - invalid */
2594
2595/* Opcode VEX.0F 0x30 - invalid */
2596/* Opcode VEX.0F 0x31 - invalid */
2597/* Opcode VEX.0F 0x32 - invalid */
2598/* Opcode VEX.0F 0x33 - invalid */
2599/* Opcode VEX.0F 0x34 - invalid */
2600/* Opcode VEX.0F 0x35 - invalid */
2601/* Opcode VEX.0F 0x36 - invalid */
2602/* Opcode VEX.0F 0x37 - invalid */
2603/* Opcode VEX.0F 0x38 - invalid */
2604/* Opcode VEX.0F 0x39 - invalid */
2605/* Opcode VEX.0F 0x3a - invalid */
2606/* Opcode VEX.0F 0x3b - invalid */
2607/* Opcode VEX.0F 0x3c - invalid */
2608/* Opcode VEX.0F 0x3d - invalid */
2609/* Opcode VEX.0F 0x3e - invalid */
2610/* Opcode VEX.0F 0x3f - invalid */
2611/* Opcode VEX.0F 0x40 - invalid */
2612/* Opcode VEX.0F 0x41 - invalid */
2613/* Opcode VEX.0F 0x42 - invalid */
2614/* Opcode VEX.0F 0x43 - invalid */
2615/* Opcode VEX.0F 0x44 - invalid */
2616/* Opcode VEX.0F 0x45 - invalid */
2617/* Opcode VEX.0F 0x46 - invalid */
2618/* Opcode VEX.0F 0x47 - invalid */
2619/* Opcode VEX.0F 0x48 - invalid */
2620/* Opcode VEX.0F 0x49 - invalid */
2621/* Opcode VEX.0F 0x4a - invalid */
2622/* Opcode VEX.0F 0x4b - invalid */
2623/* Opcode VEX.0F 0x4c - invalid */
2624/* Opcode VEX.0F 0x4d - invalid */
2625/* Opcode VEX.0F 0x4e - invalid */
2626/* Opcode VEX.0F 0x4f - invalid */
2627
2628
2629/** Opcode VEX.0F 0x50 - vmovmskps Gy, Ups */
2630FNIEMOP_DEF(iemOp_vmovmskps_Gy_Ups)
2631{
2632 IEMOP_MNEMONIC2(VEX_RM_REG, VMOVMSKPS, vmovmskps, Gd, Ux, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
2633 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2634 if (IEM_IS_MODRM_REG_MODE(bRm))
2635 {
2636 /*
2637 * Register, register.
2638 */
2639 if (pVCpu->iem.s.uVexLength == 0)
2640 {
2641 IEMOP_HLP_DONE_VEX_DECODING();
2642 IEM_MC_BEGIN(2, 1);
2643 IEM_MC_LOCAL(uint8_t, u8Dst);
2644 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2645 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2646 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2647 IEM_MC_PREPARE_AVX_USAGE();
2648 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2649 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmovmskps_u128, iemAImpl_vmovmskps_u128_fallback),
2650 pu8Dst, puSrc);
2651 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2652 IEM_MC_ADVANCE_RIP();
2653 IEM_MC_END();
2654 }
2655 else
2656 {
2657 IEMOP_HLP_DONE_VEX_DECODING();
2658 IEM_MC_BEGIN(2, 2);
2659 IEM_MC_LOCAL(uint8_t, u8Dst);
2660 IEM_MC_LOCAL(RTUINT256U, uSrc);
2661 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2662 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
2663
2664 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
2665 IEM_MC_PREPARE_AVX_USAGE();
2666 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2667 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vmovmskps_u256, iemAImpl_vmovmskps_u256_fallback),
2668 pu8Dst, puSrc);
2669 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2670 IEM_MC_ADVANCE_RIP();
2671 IEM_MC_END();
2672 }
2673 return VINF_SUCCESS;
2674 }
2675
2676 /* No memory operand. */
2677 return IEMOP_RAISE_INVALID_OPCODE();
2678}
2679
2680
2681/** Opcode VEX.66.0F 0x50 - vmovmskpd Gy,Upd */
2682FNIEMOP_DEF(iemOp_vmovmskpd_Gy_Upd)
2683{
2684{
2685 IEMOP_MNEMONIC2(VEX_RM_REG, VMOVMSKPD, vmovmskpd, Gd, Ux, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
2686 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2687 if (IEM_IS_MODRM_REG_MODE(bRm))
2688 {
2689 /*
2690 * Register, register.
2691 */
2692 if (pVCpu->iem.s.uVexLength == 0)
2693 {
2694 IEMOP_HLP_DONE_VEX_DECODING();
2695 IEM_MC_BEGIN(2, 1);
2696 IEM_MC_LOCAL(uint8_t, u8Dst);
2697 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2698 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2699 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2700 IEM_MC_PREPARE_AVX_USAGE();
2701 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2702 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmovmskpd_u128, iemAImpl_vmovmskpd_u128_fallback),
2703 pu8Dst, puSrc);
2704 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2705 IEM_MC_ADVANCE_RIP();
2706 IEM_MC_END();
2707 }
2708 else
2709 {
2710 IEMOP_HLP_DONE_VEX_DECODING();
2711 IEM_MC_BEGIN(2, 2);
2712 IEM_MC_LOCAL(uint8_t, u8Dst);
2713 IEM_MC_LOCAL(RTUINT256U, uSrc);
2714 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2715 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
2716
2717 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
2718 IEM_MC_PREPARE_AVX_USAGE();
2719 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2720 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vmovmskpd_u256, iemAImpl_vmovmskpd_u256_fallback),
2721 pu8Dst, puSrc);
2722 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2723 IEM_MC_ADVANCE_RIP();
2724 IEM_MC_END();
2725 }
2726 return VINF_SUCCESS;
2727 }
2728
2729 /* No memory operand. */
2730 return IEMOP_RAISE_INVALID_OPCODE();
2731}
2732}
2733
2734
2735/* Opcode VEX.F3.0F 0x50 - invalid */
2736/* Opcode VEX.F2.0F 0x50 - invalid */
2737
2738/** Opcode VEX.0F 0x51 - vsqrtps Vps, Wps */
2739FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps);
2740/** Opcode VEX.66.0F 0x51 - vsqrtpd Vpd, Wpd */
2741FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd);
2742/** Opcode VEX.F3.0F 0x51 - vsqrtss Vss, Hss, Wss */
2743FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss);
2744/** Opcode VEX.F2.0F 0x51 - vsqrtsd Vsd, Hsd, Wsd */
2745FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd);
2746
2747/** Opcode VEX.0F 0x52 - vrsqrtps Vps, Wps */
2748FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
2749/* Opcode VEX.66.0F 0x52 - invalid */
2750/** Opcode VEX.F3.0F 0x52 - vrsqrtss Vss, Hss, Wss */
2751FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
2752/* Opcode VEX.F2.0F 0x52 - invalid */
2753
2754/** Opcode VEX.0F 0x53 - vrcpps Vps, Wps */
2755FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
2756/* Opcode VEX.66.0F 0x53 - invalid */
2757/** Opcode VEX.F3.0F 0x53 - vrcpss Vss, Hss, Wss */
2758FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
2759/* Opcode VEX.F2.0F 0x53 - invalid */
2760
2761
2762/** Opcode VEX.0F 0x54 - vandps Vps, Hps, Wps */
2763FNIEMOP_DEF(iemOp_vandps_Vps_Hps_Wps)
2764{
2765 IEMOP_MNEMONIC3(VEX_RVM, VANDPS, vandps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2766 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2767 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
2768}
2769
2770
2771/** Opcode VEX.66.0F 0x54 - vandpd Vpd, Hpd, Wpd */
2772FNIEMOP_DEF(iemOp_vandpd_Vpd_Hpd_Wpd)
2773{
2774 IEMOP_MNEMONIC3(VEX_RVM, VANDPD, vandpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2775 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2776 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
2777}
2778
2779
2780/* Opcode VEX.F3.0F 0x54 - invalid */
2781/* Opcode VEX.F2.0F 0x54 - invalid */
2782
2783
2784/** Opcode VEX.0F 0x55 - vandnps Vps, Hps, Wps */
2785FNIEMOP_DEF(iemOp_vandnps_Vps_Hps_Wps)
2786{
2787 IEMOP_MNEMONIC3(VEX_RVM, VANDNPS, vandnps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2788 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2789 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
2790}
2791
2792
2793/** Opcode VEX.66.0F 0x55 - vandnpd Vpd, Hpd, Wpd */
2794FNIEMOP_DEF(iemOp_vandnpd_Vpd_Hpd_Wpd)
2795{
2796 IEMOP_MNEMONIC3(VEX_RVM, VANDNPD, vandnpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2797 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2798 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
2799}
2800
2801
2802/* Opcode VEX.F3.0F 0x55 - invalid */
2803/* Opcode VEX.F2.0F 0x55 - invalid */
2804
2805/** Opcode VEX.0F 0x56 - vorps Vps, Hps, Wps */
2806FNIEMOP_DEF(iemOp_vorps_Vps_Hps_Wps)
2807{
2808 IEMOP_MNEMONIC3(VEX_RVM, VORPS, vorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2809 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2810 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
2811}
2812
2813
2814/** Opcode VEX.66.0F 0x56 - vorpd Vpd, Hpd, Wpd */
2815FNIEMOP_DEF(iemOp_vorpd_Vpd_Hpd_Wpd)
2816{
2817 IEMOP_MNEMONIC3(VEX_RVM, VORPD, vorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2818 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2819 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
2820}
2821
2822
2823/* Opcode VEX.F3.0F 0x56 - invalid */
2824/* Opcode VEX.F2.0F 0x56 - invalid */
2825
2826
2827/** Opcode VEX.0F 0x57 - vxorps Vps, Hps, Wps */
2828FNIEMOP_DEF(iemOp_vxorps_Vps_Hps_Wps)
2829{
2830 IEMOP_MNEMONIC3(VEX_RVM, VXORPS, vxorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2831 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2832 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
2833}
2834
2835
2836/** Opcode VEX.66.0F 0x57 - vxorpd Vpd, Hpd, Wpd */
2837FNIEMOP_DEF(iemOp_vxorpd_Vpd_Hpd_Wpd)
2838{
2839 IEMOP_MNEMONIC3(VEX_RVM, VXORPD, vxorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2840 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
2841 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
2842}
2843
2844
2845/* Opcode VEX.F3.0F 0x57 - invalid */
2846/* Opcode VEX.F2.0F 0x57 - invalid */
2847
2848/** Opcode VEX.0F 0x58 - vaddps Vps, Hps, Wps */
2849FNIEMOP_STUB(iemOp_vaddps_Vps_Hps_Wps);
2850/** Opcode VEX.66.0F 0x58 - vaddpd Vpd, Hpd, Wpd */
2851FNIEMOP_STUB(iemOp_vaddpd_Vpd_Hpd_Wpd);
2852/** Opcode VEX.F3.0F 0x58 - vaddss Vss, Hss, Wss */
2853FNIEMOP_STUB(iemOp_vaddss_Vss_Hss_Wss);
2854/** Opcode VEX.F2.0F 0x58 - vaddsd Vsd, Hsd, Wsd */
2855FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd);
2856
2857/** Opcode VEX.0F 0x59 - vmulps Vps, Hps, Wps */
2858FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps);
2859/** Opcode VEX.66.0F 0x59 - vmulpd Vpd, Hpd, Wpd */
2860FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd);
2861/** Opcode VEX.F3.0F 0x59 - vmulss Vss, Hss, Wss */
2862FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss);
2863/** Opcode VEX.F2.0F 0x59 - vmulsd Vsd, Hsd, Wsd */
2864FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd);
2865
2866/** Opcode VEX.0F 0x5a - vcvtps2pd Vpd, Wps */
2867FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
2868/** Opcode VEX.66.0F 0x5a - vcvtpd2ps Vps, Wpd */
2869FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
2870/** Opcode VEX.F3.0F 0x5a - vcvtss2sd Vsd, Hx, Wss */
2871FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
2872/** Opcode VEX.F2.0F 0x5a - vcvtsd2ss Vss, Hx, Wsd */
2873FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);
2874
2875/** Opcode VEX.0F 0x5b - vcvtdq2ps Vps, Wdq */
2876FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
2877/** Opcode VEX.66.0F 0x5b - vcvtps2dq Vdq, Wps */
2878FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
2879/** Opcode VEX.F3.0F 0x5b - vcvttps2dq Vdq, Wps */
2880FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
2881/* Opcode VEX.F2.0F 0x5b - invalid */
2882
2883/** Opcode VEX.0F 0x5c - vsubps Vps, Hps, Wps */
2884FNIEMOP_STUB(iemOp_vsubps_Vps_Hps_Wps);
2885/** Opcode VEX.66.0F 0x5c - vsubpd Vpd, Hpd, Wpd */
2886FNIEMOP_STUB(iemOp_vsubpd_Vpd_Hpd_Wpd);
2887/** Opcode VEX.F3.0F 0x5c - vsubss Vss, Hss, Wss */
2888FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss);
2889/** Opcode VEX.F2.0F 0x5c - vsubsd Vsd, Hsd, Wsd */
2890FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd);
2891
2892/** Opcode VEX.0F 0x5d - vminps Vps, Hps, Wps */
2893FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps);
2894/** Opcode VEX.66.0F 0x5d - vminpd Vpd, Hpd, Wpd */
2895FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd);
2896/** Opcode VEX.F3.0F 0x5d - vminss Vss, Hss, Wss */
2897FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss);
2898/** Opcode VEX.F2.0F 0x5d - vminsd Vsd, Hsd, Wsd */
2899FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd);
2900
2901/** Opcode VEX.0F 0x5e - vdivps Vps, Hps, Wps */
2902FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps);
2903/** Opcode VEX.66.0F 0x5e - vdivpd Vpd, Hpd, Wpd */
2904FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd);
2905/** Opcode VEX.F3.0F 0x5e - vdivss Vss, Hss, Wss */
2906FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss);
2907/** Opcode VEX.F2.0F 0x5e - vdivsd Vsd, Hsd, Wsd */
2908FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd);
2909
2910/** Opcode VEX.0F 0x5f - vmaxps Vps, Hps, Wps */
2911FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps);
2912/** Opcode VEX.66.0F 0x5f - vmaxpd Vpd, Hpd, Wpd */
2913FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd);
2914/** Opcode VEX.F3.0F 0x5f - vmaxss Vss, Hss, Wss */
2915FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss);
2916/** Opcode VEX.F2.0F 0x5f - vmaxsd Vsd, Hsd, Wsd */
2917FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd);
2918
2919
2920/* Opcode VEX.0F 0x60 - invalid */
2921
2922
2923/** Opcode VEX.66.0F 0x60 - vpunpcklbw Vx, Hx, Wx */
2924FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
2925{
2926 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLBW, vpunpcklbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
2927 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklbw);
2928 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2929}
2930
2931
2932/* Opcode VEX.F3.0F 0x60 - invalid */
2933
2934
2935/* Opcode VEX.0F 0x61 - invalid */
2936
2937
2938/** Opcode VEX.66.0F 0x61 - vpunpcklwd Vx, Hx, Wx */
2939FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
2940{
2941 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLWD, vpunpcklwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
2942 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklwd);
2943 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2944}
2945
2946
2947/* Opcode VEX.F3.0F 0x61 - invalid */
2948
2949
2950/* Opcode VEX.0F 0x62 - invalid */
2951
2952/** Opcode VEX.66.0F 0x62 - vpunpckldq Vx, Hx, Wx */
2953FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
2954{
2955 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLDQ, vpunpckldq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
2956 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckldq);
2957 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2958}
2959
2960
2961/* Opcode VEX.F3.0F 0x62 - invalid */
2962
2963
2964
2965/* Opcode VEX.0F 0x63 - invalid */
2966
2967
2968/** Opcode VEX.66.0F 0x63 - vpacksswb Vx, Hx, Wx */
2969FNIEMOP_DEF(iemOp_vpacksswb_Vx_Hx_Wx)
2970{
2971 IEMOP_MNEMONIC3(VEX_RVM, VPACKSSWB, vpacksswb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
2972 IEMOPMEDIAOPTF3_INIT_VARS( vpacksswb);
2973 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2974}
2975
2976
2977/* Opcode VEX.F3.0F 0x63 - invalid */
2978
2979/* Opcode VEX.0F 0x64 - invalid */
2980
2981
2982/** Opcode VEX.66.0F 0x64 - vpcmpgtb Vx, Hx, Wx */
2983FNIEMOP_DEF(iemOp_vpcmpgtb_Vx_Hx_Wx)
2984{
2985 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTB, vpcmpgtb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
2986 IEMOPMEDIAF3_INIT_VARS( vpcmpgtb);
2987 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2988}
2989
2990
2991/* Opcode VEX.F3.0F 0x64 - invalid */
2992
2993/* Opcode VEX.0F 0x65 - invalid */
2994
2995
2996/** Opcode VEX.66.0F 0x65 - vpcmpgtw Vx, Hx, Wx */
2997FNIEMOP_DEF(iemOp_vpcmpgtw_Vx_Hx_Wx)
2998{
2999 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTW, vpcmpgtw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3000 IEMOPMEDIAF3_INIT_VARS( vpcmpgtw);
3001 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3002}
3003
3004
3005/* Opcode VEX.F3.0F 0x65 - invalid */
3006
3007/* Opcode VEX.0F 0x66 - invalid */
3008
3009
3010/** Opcode VEX.66.0F 0x66 - vpcmpgtd Vx, Hx, Wx */
3011FNIEMOP_DEF(iemOp_vpcmpgtd_Vx_Hx_Wx)
3012{
3013 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTD, vpcmpgtd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3014 IEMOPMEDIAF3_INIT_VARS( vpcmpgtd);
3015 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3016}
3017
3018
3019/* Opcode VEX.F3.0F 0x66 - invalid */
3020
3021/* Opcode VEX.0F 0x67 - invalid */
3022
3023
/** Opcode VEX.66.0F 0x67 - vpackuswb Vx, Hx, Wx */
3025FNIEMOP_DEF(iemOp_vpackuswb_Vx_Hx_W)
3026{
3027 IEMOP_MNEMONIC3(VEX_RVM, VPACKUSWB, vpackuswb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
3028 IEMOPMEDIAOPTF3_INIT_VARS( vpackuswb);
3029 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3030}
3031
3032
3033/* Opcode VEX.F3.0F 0x67 - invalid */
3034
3035
3036///**
3037// * Common worker for SSE2 instructions on the form:
3038// * pxxxx xmm1, xmm2/mem128
3039// *
3040// * The 2nd operand is the second half of a register, which in the memory case
3041// * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3042// * where it may read the full 128 bits or only the upper 64 bits.
3043// *
3044// * Exceptions type 4.
3045// */
3046//FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3047//{
3048// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3049// if (IEM_IS_MODRM_REG_MODE(bRm))
3050// {
3051// /*
3052// * Register, register.
3053// */
3054// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3055// IEM_MC_BEGIN(2, 0);
3056// IEM_MC_ARG(PRTUINT128U, pDst, 0);
3057// IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3058// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
3059// IEM_MC_PREPARE_SSE_USAGE();
3060// IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3061// IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3062// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3063// IEM_MC_ADVANCE_RIP();
3064// IEM_MC_END();
3065// }
3066// else
3067// {
3068// /*
3069// * Register, memory.
3070// */
3071// IEM_MC_BEGIN(2, 2);
3072// IEM_MC_ARG(PRTUINT128U, pDst, 0);
3073// IEM_MC_LOCAL(RTUINT128U, uSrc);
3074// IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3075// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3076//
3077// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3078// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3079// IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
//        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
3081//
3082// IEM_MC_PREPARE_SSE_USAGE();
3083// IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3084// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3085//
3086// IEM_MC_ADVANCE_RIP();
3087// IEM_MC_END();
3088// }
3089// return VINF_SUCCESS;
3090//}
3091
3092
3093/* Opcode VEX.0F 0x68 - invalid */
3094
3095/** Opcode VEX.66.0F 0x68 - vpunpckhbw Vx, Hx, Wx */
3096FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
3097{
3098 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHBW, vpunpckhbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
3099 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhbw);
3100 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3101}
3102
3103
3104/* Opcode VEX.F3.0F 0x68 - invalid */
3105
3106
3107/* Opcode VEX.0F 0x69 - invalid */
3108
3109
3110/** Opcode VEX.66.0F 0x69 - vpunpckhwd Vx, Hx, Wx */
3111FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
3112{
3113 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHWD, vpunpckhwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
3114 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhwd);
3115 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3116}
3117
3118
3119/* Opcode VEX.F3.0F 0x69 - invalid */
3120
3121
3122/* Opcode VEX.0F 0x6a - invalid */
3123
3124
/** Opcode VEX.66.0F 0x6a - vpunpckhdq Vx, Hx, Wx */
3126FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
3127{
3128 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHDQ, vpunpckhdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
3129 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhdq);
3130 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3131}
3132
3133
3134/* Opcode VEX.F3.0F 0x6a - invalid */
3135
3136
3137/* Opcode VEX.0F 0x6b - invalid */
3138
3139
3140/** Opcode VEX.66.0F 0x6b - vpackssdw Vx, Hx, Wx */
3141FNIEMOP_DEF(iemOp_vpackssdw_Vx_Hx_Wx)
3142{
3143 IEMOP_MNEMONIC3(VEX_RVM, VPACKSSDW, vpackssdw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
3144 IEMOPMEDIAOPTF3_INIT_VARS( vpackssdw);
3145 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3146}
3147
3148
3149/* Opcode VEX.F3.0F 0x6b - invalid */
3150
3151
3152/* Opcode VEX.0F 0x6c - invalid */
3153
3154
3155/** Opcode VEX.66.0F 0x6c - vpunpcklqdq Vx, Hx, Wx */
3156FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
3157{
3158 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLQDQ, vpunpcklqdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
3159 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklqdq);
3160 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3161}
3162
3163
3164/* Opcode VEX.F3.0F 0x6c - invalid */
3165/* Opcode VEX.F2.0F 0x6c - invalid */
3166
3167
3168/* Opcode VEX.0F 0x6d - invalid */
3169
3170
/** Opcode VEX.66.0F 0x6d - vpunpckhqdq Vx, Hx, Wx */
3172FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
3173{
3174 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHQDQ, vpunpckhqdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
3175 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhqdq);
3176 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3177}
3178
3179
3180/* Opcode VEX.F3.0F 0x6d - invalid */
3181
3182
3183/* Opcode VEX.0F 0x6e - invalid */
3184
3185FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey)
3186{
3187 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
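    /* The VEX.W bit (recorded by the decoder as IEM_OP_PRF_SIZE_REX_W) selects
       the 64-bit vmovq form (W=1) rather than the 32-bit vmovd form (W=0). */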
3188 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3189 {
3190 /**
3191 * @opcode 0x6e
3192 * @opcodesub rex.w=1
3193 * @oppfx 0x66
3194 * @opcpuid avx
3195 * @opgroup og_avx_simdint_datamov
3196 * @opxcpttype 5
3197 * @optest 64-bit / op1=1 op2=2 -> op1=2
3198 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
3199 */
3200 IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
3201 if (IEM_IS_MODRM_REG_MODE(bRm))
3202 {
3203 /* XMM, greg64 */
3204 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3205 IEM_MC_BEGIN(0, 1);
3206 IEM_MC_LOCAL(uint64_t, u64Tmp);
3207
3208 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3209 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3210
3211 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3212 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3213
3214 IEM_MC_ADVANCE_RIP();
3215 IEM_MC_END();
3216 }
3217 else
3218 {
3219 /* XMM, [mem64] */
3220 IEM_MC_BEGIN(0, 2);
3221 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3222 IEM_MC_LOCAL(uint64_t, u64Tmp);
3223
3224 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3225 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3226 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3227 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3228
3229 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3230 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3231
3232 IEM_MC_ADVANCE_RIP();
3233 IEM_MC_END();
3234 }
3235 }
3236 else
3237 {
3238 /**
3239 * @opdone
3240 * @opcode 0x6e
3241 * @opcodesub rex.w=0
3242 * @oppfx 0x66
3243 * @opcpuid avx
3244 * @opgroup og_avx_simdint_datamov
3245 * @opxcpttype 5
3246 * @opfunction iemOp_vmovd_q_Vy_Ey
3247 * @optest op1=1 op2=2 -> op1=2
3248 * @optest op1=0 op2=-42 -> op1=-42
3249 */
3250 IEMOP_MNEMONIC2(VEX_RM, VMOVD, vmovd, Vd_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
3251 if (IEM_IS_MODRM_REG_MODE(bRm))
3252 {
3253 /* XMM, greg32 */
3254 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3255 IEM_MC_BEGIN(0, 1);
3256 IEM_MC_LOCAL(uint32_t, u32Tmp);
3257
3258 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3259 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3260
3261 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3262 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3263
3264 IEM_MC_ADVANCE_RIP();
3265 IEM_MC_END();
3266 }
3267 else
3268 {
3269 /* XMM, [mem32] */
3270 IEM_MC_BEGIN(0, 2);
3271 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3272 IEM_MC_LOCAL(uint32_t, u32Tmp);
3273
3274 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3275 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3276 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3277 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3278
3279 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3280 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3281
3282 IEM_MC_ADVANCE_RIP();
3283 IEM_MC_END();
3284 }
3285 }
3286 return VINF_SUCCESS;
3287}
3288
3289
3290/* Opcode VEX.F3.0F 0x6e - invalid */
3291
3292
3293/* Opcode VEX.0F 0x6f - invalid */
3294
3295/**
3296 * @opcode 0x6f
3297 * @oppfx 0x66
3298 * @opcpuid avx
3299 * @opgroup og_avx_simdint_datamove
3300 * @opxcpttype 1
3301 * @optest op1=1 op2=2 -> op1=2
3302 * @optest op1=0 op2=-42 -> op1=-42
3303 */
3304FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx)
3305{
3306 IEMOP_MNEMONIC2(VEX_RM, VMOVDQA, vmovdqa, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
3307 Assert(pVCpu->iem.s.uVexLength <= 1);
3308 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3309 if (IEM_IS_MODRM_REG_MODE(bRm))
3310 {
3311 /*
3312 * Register, register.
3313 */
3314 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3315 IEM_MC_BEGIN(0, 0);
3316
3317 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3318 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3319 if (pVCpu->iem.s.uVexLength == 0)
3320 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3321 IEM_GET_MODRM_RM(pVCpu, bRm));
3322 else
3323 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3324 IEM_GET_MODRM_RM(pVCpu, bRm));
3325 IEM_MC_ADVANCE_RIP();
3326 IEM_MC_END();
3327 }
3328 else if (pVCpu->iem.s.uVexLength == 0)
3329 {
3330 /*
3331 * Register, memory128.
3332 */
3333 IEM_MC_BEGIN(0, 2);
3334 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3335 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3336
3337 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3338 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3339 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3340 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3341
3342 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3343 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
3344
3345 IEM_MC_ADVANCE_RIP();
3346 IEM_MC_END();
3347 }
3348 else
3349 {
3350 /*
3351 * Register, memory256.
3352 */
3353 IEM_MC_BEGIN(0, 2);
3354 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
3355 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3356
3357 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3358 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3359 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3360 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3361
3362 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3363 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
3364
3365 IEM_MC_ADVANCE_RIP();
3366 IEM_MC_END();
3367 }
3368 return VINF_SUCCESS;
3369}
3370
3371/**
3372 * @opcode 0x6f
3373 * @oppfx 0xf3
3374 * @opcpuid avx
3375 * @opgroup og_avx_simdint_datamove
3376 * @opxcpttype 4UA
3377 * @optest op1=1 op2=2 -> op1=2
3378 * @optest op1=0 op2=-42 -> op1=-42
3379 */
3380FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx)
3381{
3382 IEMOP_MNEMONIC2(VEX_RM, VMOVDQU, vmovdqu, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
3383 Assert(pVCpu->iem.s.uVexLength <= 1);
3384 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3385 if (IEM_IS_MODRM_REG_MODE(bRm))
3386 {
3387 /*
3388 * Register, register.
3389 */
3390 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3391 IEM_MC_BEGIN(0, 0);
3392
3393 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3394 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3395 if (pVCpu->iem.s.uVexLength == 0)
3396 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3397 IEM_GET_MODRM_RM(pVCpu, bRm));
3398 else
3399 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3400 IEM_GET_MODRM_RM(pVCpu, bRm));
3401 IEM_MC_ADVANCE_RIP();
3402 IEM_MC_END();
3403 }
3404 else if (pVCpu->iem.s.uVexLength == 0)
3405 {
3406 /*
3407 * Register, memory128.
3408 */
3409 IEM_MC_BEGIN(0, 2);
3410 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3411 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3412
3413 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3414 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3415 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3416 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3417
3418 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3419 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
3420
3421 IEM_MC_ADVANCE_RIP();
3422 IEM_MC_END();
3423 }
3424 else
3425 {
3426 /*
3427 * Register, memory256.
3428 */
3429 IEM_MC_BEGIN(0, 2);
3430 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
3431 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3432
3433 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3434 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3435 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3436 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3437
3438 IEM_MC_FETCH_MEM_U256(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3439 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
3440
3441 IEM_MC_ADVANCE_RIP();
3442 IEM_MC_END();
3443 }
3444 return VINF_SUCCESS;
3445}
3446
3447
3448/* Opcode VEX.0F 0x70 - invalid */
3449
3450
3451/**
3452 * Common worker for AVX/AVX2 instructions on the forms:
3453 * - vpxxx xmm0, xmm2/mem128, imm8
3454 * - vpxxx ymm0, ymm2/mem256, imm8
3455 *
3456 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
3457 */
3458FNIEMOP_DEF_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnU128, PFNIEMAIMPLMEDIAPSHUFU256, pfnU256)
3459{
3460 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3461 if (IEM_IS_MODRM_REG_MODE(bRm))
3462 {
3463 /*
3464 * Register, register.
3465 */
3466 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3467 if (pVCpu->iem.s.uVexLength)
3468 {
3469 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
3470 IEM_MC_BEGIN(3, 2);
3471 IEM_MC_LOCAL(RTUINT256U, uDst);
3472 IEM_MC_LOCAL(RTUINT256U, uSrc);
3473 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
3474 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3475 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3476 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
3477 IEM_MC_PREPARE_AVX_USAGE();
3478 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3479 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bEvilArg);
3480 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3481 IEM_MC_ADVANCE_RIP();
3482 IEM_MC_END();
3483 }
3484 else
3485 {
3486 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3487 IEM_MC_BEGIN(3, 0);
3488 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3489 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
3490 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3491 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
3492 IEM_MC_PREPARE_AVX_USAGE();
3493 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3494 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3495 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bEvilArg);
3496 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
3497 IEM_MC_ADVANCE_RIP();
3498 IEM_MC_END();
3499 }
3500 }
3501 else
3502 {
3503 /*
3504 * Register, memory.
3505 */
3506 if (pVCpu->iem.s.uVexLength)
3507 {
3508 IEM_MC_BEGIN(3, 3);
3509 IEM_MC_LOCAL(RTUINT256U, uDst);
3510 IEM_MC_LOCAL(RTUINT256U, uSrc);
3511 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3512 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
3513 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3514
3515 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
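            /* The imm8 follows the ModRM byte and any displacement, so it can
               only be fetched once the effective address has been decoded. */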
3516 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3517 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
3518 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3519 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
3520 IEM_MC_PREPARE_AVX_USAGE();
3521
3522 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3523 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bEvilArg);
3524 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3525
3526 IEM_MC_ADVANCE_RIP();
3527 IEM_MC_END();
3528 }
3529 else
3530 {
            IEM_MC_BEGIN(3, 2);
3532 IEM_MC_LOCAL(RTUINT128U, uSrc);
3533 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3534 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3535 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
3536
3537 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3538 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
3539 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3540 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
3541 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
3542 IEM_MC_PREPARE_AVX_USAGE();
3543
3544 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3545 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3546 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bEvilArg);
3547 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
3548
3549 IEM_MC_ADVANCE_RIP();
3550 IEM_MC_END();
3551 }
3552 }
3553 return VINF_SUCCESS;
3554}
3555
3556
3557/** Opcode VEX.66.0F 0x70 - vpshufd Vx, Wx, Ib */
3558FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
3559{
3560 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFD, vpshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
3561 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128,
3562 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufd_u256, iemAImpl_vpshufd_u256_fallback));
3563
3564}
3565
3566
3567/** Opcode VEX.F3.0F 0x70 - vpshufhw Vx, Wx, Ib */
3568FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
3569{
3570 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFHW, vpshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
3571 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128,
3572 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufhw_u256, iemAImpl_vpshufhw_u256_fallback));
3573
3574}
3575
3576
3577/** Opcode VEX.F2.0F 0x70 - vpshuflw Vx, Wx, Ib */
3578FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
3579{
3580 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFLW, vpshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
3581 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128,
3582 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshuflw_u256, iemAImpl_vpshuflw_u256_fallback));
3583}
3584
3585
3586/* Opcode VEX.0F 0x71 11/2 - invalid. */
3587/** Opcode VEX.66.0F 0x71 11/2. */
3588FNIEMOP_STUB_1(iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, uint8_t, bRm);
3589
3590/* Opcode VEX.0F 0x71 11/4 - invalid */
3591/** Opcode VEX.66.0F 0x71 11/4. */
3592FNIEMOP_STUB_1(iemOp_VGrp12_vpsraw_Hx_Ux_Ib, uint8_t, bRm);
3593
3594/* Opcode VEX.0F 0x71 11/6 - invalid */
3595/** Opcode VEX.66.0F 0x71 11/6. */
3596FNIEMOP_STUB_1(iemOp_VGrp12_vpsllw_Hx_Ux_Ib, uint8_t, bRm);
3597
3598
3599/**
3600 * VEX Group 12 jump table for register variant.
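 * Indexed by: ModR/M reg field * 4 + SIMD prefix index (none, 066h, 0f3h, 0f2h).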
3601 */
3602IEM_STATIC const PFNIEMOPRM g_apfnVexGroup12RegReg[] =
3603{
3604 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3605 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3606 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3607 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3608 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsraw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3609 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3610 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsllw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3611 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3612};
3613AssertCompile(RT_ELEMENTS(g_apfnVexGroup12RegReg) == 8*4);
3614
3615
3616/** Opcode VEX.0F 0x71. */
3617FNIEMOP_DEF(iemOp_VGrp12)
3618{
3619 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3620 if (IEM_IS_MODRM_REG_MODE(bRm))
3621 /* register, register */
3622 return FNIEMOP_CALL_1(g_apfnVexGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
3623 + pVCpu->iem.s.idxPrefix], bRm);
3624 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3625}
3626
3627
3628/* Opcode VEX.0F 0x72 11/2 - invalid. */
3629/** Opcode VEX.66.0F 0x72 11/2. */
3630FNIEMOP_STUB_1(iemOp_VGrp13_vpsrld_Hx_Ux_Ib, uint8_t, bRm);
3631
3632/* Opcode VEX.0F 0x72 11/4 - invalid. */
3633/** Opcode VEX.66.0F 0x72 11/4. */
3634FNIEMOP_STUB_1(iemOp_VGrp13_vpsrad_Hx_Ux_Ib, uint8_t, bRm);
3635
3636/* Opcode VEX.0F 0x72 11/6 - invalid. */
3637/** Opcode VEX.66.0F 0x72 11/6. */
3638FNIEMOP_STUB_1(iemOp_VGrp13_vpslld_Hx_Ux_Ib, uint8_t, bRm);
3639
3640
3641/**
3642 * VEX Group 13 jump table for register variant.
3643 */
3644IEM_STATIC const PFNIEMOPRM g_apfnVexGroup13RegReg[] =
3645{
3646 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3647 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3648 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3649 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3650 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrad_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3651 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3652 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpslld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3653 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3654};
3655AssertCompile(RT_ELEMENTS(g_apfnVexGroup13RegReg) == 8*4);
3656
3657/** Opcode VEX.0F 0x72. */
3658FNIEMOP_DEF(iemOp_VGrp13)
3659{
3660 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3661 if (IEM_IS_MODRM_REG_MODE(bRm))
3662 /* register, register */
3663 return FNIEMOP_CALL_1(g_apfnVexGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
3664 + pVCpu->iem.s.idxPrefix], bRm);
3665 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3666}
3667
3668
3669/* Opcode VEX.0F 0x73 11/2 - invalid. */
3670/** Opcode VEX.66.0F 0x73 11/2. */
3671FNIEMOP_STUB_1(iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, uint8_t, bRm);
3672
3673/** Opcode VEX.66.0F 0x73 11/3. */
3674FNIEMOP_STUB_1(iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, uint8_t, bRm);
3675
3676/* Opcode VEX.0F 0x73 11/6 - invalid. */
3677/** Opcode VEX.66.0F 0x73 11/6. */
3678FNIEMOP_STUB_1(iemOp_VGrp14_vpsllq_Hx_Ux_Ib, uint8_t, bRm);
3679
3680/** Opcode VEX.66.0F 0x73 11/7. */
3681FNIEMOP_STUB_1(iemOp_VGrp14_vpslldq_Hx_Ux_Ib, uint8_t, bRm);
3682
3683/**
3684 * VEX Group 14 jump table for register variant.
3685 */
3686IEM_STATIC const PFNIEMOPRM g_apfnVexGroup14RegReg[] =
3687{
3688 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3689 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3690 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3691 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3692 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3693 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3694 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsllq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3695 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpslldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3696};
3697AssertCompile(RT_ELEMENTS(g_apfnVexGroup14RegReg) == 8*4);
3698
3699
3700/** Opcode VEX.0F 0x73. */
3701FNIEMOP_DEF(iemOp_VGrp14)
3702{
3703 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3704 if (IEM_IS_MODRM_REG_MODE(bRm))
3705 /* register, register */
3706 return FNIEMOP_CALL_1(g_apfnVexGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
3707 + pVCpu->iem.s.idxPrefix], bRm);
3708 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3709}
3710
3711
3712/* Opcode VEX.0F 0x74 - invalid */
3713
3714
3715/** Opcode VEX.66.0F 0x74 - vpcmpeqb Vx, Hx, Wx */
3716FNIEMOP_DEF(iemOp_vpcmpeqb_Vx_Hx_Wx)
3717{
3718 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQB, vpcmpeqb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3719 IEMOPMEDIAF3_INIT_VARS( vpcmpeqb);
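    /* IEMOPMEDIAF3_INIT_VARS declares the s_Host/s_Fallback worker-function
       tables (native and C fallback) that IEM_SELECT_HOST_OR_FALLBACK picks
       between on the next line. */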
3720 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3721}
3722
3723/* Opcode VEX.F3.0F 0x74 - invalid */
3724/* Opcode VEX.F2.0F 0x74 - invalid */
3725
3726
3727/* Opcode VEX.0F 0x75 - invalid */
3728
3729
3730/** Opcode VEX.66.0F 0x75 - vpcmpeqw Vx, Hx, Wx */
3731FNIEMOP_DEF(iemOp_vpcmpeqw_Vx_Hx_Wx)
3732{
3733 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQW, vpcmpeqw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3734 IEMOPMEDIAF3_INIT_VARS( vpcmpeqw);
3735 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3736}
3737
3738
3739/* Opcode VEX.F3.0F 0x75 - invalid */
3740/* Opcode VEX.F2.0F 0x75 - invalid */
3741
3742
3743/* Opcode VEX.0F 0x76 - invalid */
3744
3745
3746/** Opcode VEX.66.0F 0x76 - vpcmpeqd Vx, Hx, Wx */
3747FNIEMOP_DEF(iemOp_vpcmpeqd_Vx_Hx_Wx)
3748{
3749 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQD, vpcmpeqd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3750 IEMOPMEDIAF3_INIT_VARS( vpcmpeqd);
3751 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3752}
3753
3754
3755/* Opcode VEX.F3.0F 0x76 - invalid */
3756/* Opcode VEX.F2.0F 0x76 - invalid */
3757
3758
3759/** Opcode VEX.0F 0x77 - vzeroupper (VEX.L=0) / vzeroall (VEX.L=1) */
3760FNIEMOP_STUB(iemOp_vzeroupperv__vzeroallv);
3761/* Opcode VEX.66.0F 0x77 - invalid */
3762/* Opcode VEX.F3.0F 0x77 - invalid */
3763/* Opcode VEX.F2.0F 0x77 - invalid */
3764
3765/* Opcode VEX.0F 0x78 - invalid */
3766/* Opcode VEX.66.0F 0x78 - invalid */
3767/* Opcode VEX.F3.0F 0x78 - invalid */
3768/* Opcode VEX.F2.0F 0x78 - invalid */
3769
3770/* Opcode VEX.0F 0x79 - invalid */
3771/* Opcode VEX.66.0F 0x79 - invalid */
3772/* Opcode VEX.F3.0F 0x79 - invalid */
3773/* Opcode VEX.F2.0F 0x79 - invalid */
3774
3775/* Opcode VEX.0F 0x7a - invalid */
3776/* Opcode VEX.66.0F 0x7a - invalid */
3777/* Opcode VEX.F3.0F 0x7a - invalid */
3778/* Opcode VEX.F2.0F 0x7a - invalid */
3779
3780/* Opcode VEX.0F 0x7b - invalid */
3781/* Opcode VEX.66.0F 0x7b - invalid */
3782/* Opcode VEX.F3.0F 0x7b - invalid */
3783/* Opcode VEX.F2.0F 0x7b - invalid */
3784
3785/* Opcode VEX.0F 0x7c - invalid */
3786/** Opcode VEX.66.0F 0x7c - vhaddpd Vpd, Hpd, Wpd */
3787FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd);
3788/* Opcode VEX.F3.0F 0x7c - invalid */
3789/** Opcode VEX.F2.0F 0x7c - vhaddps Vps, Hps, Wps */
3790FNIEMOP_STUB(iemOp_vhaddps_Vps_Hps_Wps);
3791
3792/* Opcode VEX.0F 0x7d - invalid */
3793/** Opcode VEX.66.0F 0x7d - vhsubpd Vpd, Hpd, Wpd */
3794FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd);
3795/* Opcode VEX.F3.0F 0x7d - invalid */
3796/** Opcode VEX.F2.0F 0x7d - vhsubps Vps, Hps, Wps */
3797FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps);
3798
3799
3800/* Opcode VEX.0F 0x7e - invalid */
3801
3802FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy)
3803{
3804 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3805 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3806 {
3807 /**
3808 * @opcode 0x7e
3809 * @opcodesub rex.w=1
3810 * @oppfx 0x66
3811 * @opcpuid avx
3812 * @opgroup og_avx_simdint_datamove
3813 * @opxcpttype 5
3814 * @optest 64-bit / op1=1 op2=2 -> op1=2
3815 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
3816 */
3817 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
3818 if (IEM_IS_MODRM_REG_MODE(bRm))
3819 {
3820 /* greg64, XMM */
3821 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3822 IEM_MC_BEGIN(0, 1);
3823 IEM_MC_LOCAL(uint64_t, u64Tmp);
3824
3825 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3826 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3827
3828 IEM_MC_FETCH_YREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
3829 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
3830
3831 IEM_MC_ADVANCE_RIP();
3832 IEM_MC_END();
3833 }
3834 else
3835 {
3836 /* [mem64], XMM */
3837 IEM_MC_BEGIN(0, 2);
3838 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3839 IEM_MC_LOCAL(uint64_t, u64Tmp);
3840
3841 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3842 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3843 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3844 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3845
3846 IEM_MC_FETCH_YREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
3847 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
3848
3849 IEM_MC_ADVANCE_RIP();
3850 IEM_MC_END();
3851 }
3852 }
3853 else
3854 {
3855 /**
3856 * @opdone
3857 * @opcode 0x7e
3858 * @opcodesub rex.w=0
3859 * @oppfx 0x66
3860 * @opcpuid avx
3861 * @opgroup og_avx_simdint_datamove
3862 * @opxcpttype 5
3863 * @opfunction iemOp_vmovd_q_Ey_Vy
3864 * @optest op1=1 op2=2 -> op1=2
3865 * @optest op1=0 op2=-42 -> op1=-42
3866 */
3867 IEMOP_MNEMONIC2(VEX_MR, VMOVD, vmovd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
3868 if (IEM_IS_MODRM_REG_MODE(bRm))
3869 {
3870 /* greg32, XMM */
3871 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3872 IEM_MC_BEGIN(0, 1);
3873 IEM_MC_LOCAL(uint32_t, u32Tmp);
3874
3875 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3876 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3877
3878 IEM_MC_FETCH_YREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
3879 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
3880
3881 IEM_MC_ADVANCE_RIP();
3882 IEM_MC_END();
3883 }
3884 else
3885 {
3886 /* [mem32], XMM */
3887 IEM_MC_BEGIN(0, 2);
3888 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3889 IEM_MC_LOCAL(uint32_t, u32Tmp);
3890
3891 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3892 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3893 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3894 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
3895
3896 IEM_MC_FETCH_YREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
3897 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
3898
3899 IEM_MC_ADVANCE_RIP();
3900 IEM_MC_END();
3901 }
3902 }
3903 return VINF_SUCCESS;
3904}
3905
3906/**
3907 * @opcode 0x7e
3908 * @oppfx 0xf3
3909 * @opcpuid avx
3910 * @opgroup og_avx_pcksclr_datamove
3911 * @opxcpttype none
3912 * @optest op1=1 op2=2 -> op1=2
3913 * @optest op1=0 op2=-42 -> op1=-42
3914 */
3915FNIEMOP_DEF(iemOp_vmovq_Vq_Wq)
3916{
3917 IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
3918 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3919 if (IEM_IS_MODRM_REG_MODE(bRm))
3920 {
3921 /*
3922 * Register, register.
3923 */
3924 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3925 IEM_MC_BEGIN(0, 0);
3926
3927 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3928 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3929
3930 IEM_MC_COPY_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3931 IEM_GET_MODRM_RM(pVCpu, bRm));
3932 IEM_MC_ADVANCE_RIP();
3933 IEM_MC_END();
3934 }
3935 else
3936 {
3937 /*
3938 * Memory, register.
3939 */
3940 IEM_MC_BEGIN(0, 2);
3941 IEM_MC_LOCAL(uint64_t, uSrc);
3942 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3943
3944 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3945 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
3946 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3947 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3948
3949 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3950 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
3951
3952 IEM_MC_ADVANCE_RIP();
3953 IEM_MC_END();
3954 }
3955 return VINF_SUCCESS;
3956}
3957
3958/* Opcode VEX.F2.0F 0x7e - invalid */
3959
3960
3961/* Opcode VEX.0F 0x7f - invalid */
3962
3963/**
3964 * @opcode 0x7f
3965 * @oppfx 0x66
3966 * @opcpuid avx
3967 * @opgroup og_avx_simdint_datamove
3968 * @opxcpttype 1
3969 * @optest op1=1 op2=2 -> op1=2
3970 * @optest op1=0 op2=-42 -> op1=-42
3971 */
3972FNIEMOP_DEF(iemOp_vmovdqa_Wx_Vx)
3973{
3974 IEMOP_MNEMONIC2(VEX_MR, VMOVDQA, vmovdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
3975 Assert(pVCpu->iem.s.uVexLength <= 1);
3976 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3977 if (IEM_IS_MODRM_REG_MODE(bRm))
3978 {
3979 /*
3980 * Register, register.
3981 */
3982 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
3983 IEM_MC_BEGIN(0, 0);
3984
3985 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3986 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3987 if (pVCpu->iem.s.uVexLength == 0)
3988 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
3989 IEM_GET_MODRM_REG(pVCpu, bRm));
3990 else
3991 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
3992 IEM_GET_MODRM_REG(pVCpu, bRm));
3993 IEM_MC_ADVANCE_RIP();
3994 IEM_MC_END();
3995 }
3996 else if (pVCpu->iem.s.uVexLength == 0)
3997 {
3998 /*
3999 * Register, memory128.
4000 */
4001 IEM_MC_BEGIN(0, 2);
4002 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4003 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4004
4005 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4006 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
4007 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4008 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4009
4010 IEM_MC_FETCH_YREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4011 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4012
4013 IEM_MC_ADVANCE_RIP();
4014 IEM_MC_END();
4015 }
4016 else
4017 {
4018 /*
4019 * Register, memory256.
4020 */
4021 IEM_MC_BEGIN(0, 2);
4022 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
4023 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4024
4025 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4026 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
4027 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4028 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4029
4030 IEM_MC_FETCH_YREG_U256(u256Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4031 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
4032
4033 IEM_MC_ADVANCE_RIP();
4034 IEM_MC_END();
4035 }
4036 return VINF_SUCCESS;
4037}
4038
4039/**
4040 * @opcode 0x7f
4041 * @oppfx 0xf3
4042 * @opcpuid avx
4043 * @opgroup og_avx_simdint_datamove
4044 * @opxcpttype 4UA
4045 * @optest op1=1 op2=2 -> op1=2
4046 * @optest op1=0 op2=-42 -> op1=-42
4047 */
4048FNIEMOP_DEF(iemOp_vmovdqu_Wx_Vx)
4049{
4050 IEMOP_MNEMONIC2(VEX_MR, VMOVDQU, vmovdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
4051 Assert(pVCpu->iem.s.uVexLength <= 1);
4052 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4053 if (IEM_IS_MODRM_REG_MODE(bRm))
4054 {
4055 /*
4056 * Register, register.
4057 */
4058 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
4059 IEM_MC_BEGIN(0, 0);
4060
4061 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4062 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4063 if (pVCpu->iem.s.uVexLength == 0)
4064 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4065 IEM_GET_MODRM_REG(pVCpu, bRm));
4066 else
4067 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4068 IEM_GET_MODRM_REG(pVCpu, bRm));
4069 IEM_MC_ADVANCE_RIP();
4070 IEM_MC_END();
4071 }
4072 else if (pVCpu->iem.s.uVexLength == 0)
4073 {
4074 /*
4075 * Register, memory128.
4076 */
4077 IEM_MC_BEGIN(0, 2);
4078 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4079 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4080
4081 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4082 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
4083 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4084 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4085
4086 IEM_MC_FETCH_YREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4087 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4088
4089 IEM_MC_ADVANCE_RIP();
4090 IEM_MC_END();
4091 }
4092 else
4093 {
4094 /*
4095 * Register, memory256.
4096 */
4097 IEM_MC_BEGIN(0, 2);
4098 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
4099 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4100
4101 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4102 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
4103 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4104 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4105
4106 IEM_MC_FETCH_YREG_U256(u256Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4107 IEM_MC_STORE_MEM_U256(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
4108
4109 IEM_MC_ADVANCE_RIP();
4110 IEM_MC_END();
4111 }
4112 return VINF_SUCCESS;
4113}
4114
4115/* Opcode VEX.F2.0F 0x7f - invalid */
4116
4117
4118/* Opcode VEX.0F 0x80 - invalid */
4119/* Opcode VEX.0F 0x81 - invalid */
4120/* Opcode VEX.0F 0x82 - invalid */
4121/* Opcode VEX.0F 0x83 - invalid */
4122/* Opcode VEX.0F 0x84 - invalid */
4123/* Opcode VEX.0F 0x85 - invalid */
4124/* Opcode VEX.0F 0x86 - invalid */
4125/* Opcode VEX.0F 0x87 - invalid */
4126/* Opcode VEX.0F 0x88 - invalid */
4127/* Opcode VEX.0F 0x89 - invalid */
4128/* Opcode VEX.0F 0x8a - invalid */
4129/* Opcode VEX.0F 0x8b - invalid */
4130/* Opcode VEX.0F 0x8c - invalid */
4131/* Opcode VEX.0F 0x8d - invalid */
4132/* Opcode VEX.0F 0x8e - invalid */
4133/* Opcode VEX.0F 0x8f - invalid */
4134/* Opcode VEX.0F 0x90 - invalid */
4135/* Opcode VEX.0F 0x91 - invalid */
4136/* Opcode VEX.0F 0x92 - invalid */
4137/* Opcode VEX.0F 0x93 - invalid */
4138/* Opcode VEX.0F 0x94 - invalid */
4139/* Opcode VEX.0F 0x95 - invalid */
4140/* Opcode VEX.0F 0x96 - invalid */
4141/* Opcode VEX.0F 0x97 - invalid */
4142/* Opcode VEX.0F 0x98 - invalid */
4143/* Opcode VEX.0F 0x99 - invalid */
4144/* Opcode VEX.0F 0x9a - invalid */
4145/* Opcode VEX.0F 0x9b - invalid */
4146/* Opcode VEX.0F 0x9c - invalid */
4147/* Opcode VEX.0F 0x9d - invalid */
4148/* Opcode VEX.0F 0x9e - invalid */
4149/* Opcode VEX.0F 0x9f - invalid */
4150/* Opcode VEX.0F 0xa0 - invalid */
4151/* Opcode VEX.0F 0xa1 - invalid */
4152/* Opcode VEX.0F 0xa2 - invalid */
4153/* Opcode VEX.0F 0xa3 - invalid */
4154/* Opcode VEX.0F 0xa4 - invalid */
4155/* Opcode VEX.0F 0xa5 - invalid */
4156/* Opcode VEX.0F 0xa6 - invalid */
4157/* Opcode VEX.0F 0xa7 - invalid */
4158/* Opcode VEX.0F 0xa8 - invalid */
4159/* Opcode VEX.0F 0xa9 - invalid */
4160/* Opcode VEX.0F 0xaa - invalid */
4161/* Opcode VEX.0F 0xab - invalid */
4162/* Opcode VEX.0F 0xac - invalid */
4163/* Opcode VEX.0F 0xad - invalid */
4164
4165
4166/* Opcode VEX.0F 0xae mem/0 - invalid. */
4167/* Opcode VEX.0F 0xae mem/1 - invalid. */
4168
4169/**
4170 * @ opmaps grp15
4171 * @ opcode !11/2
4172 * @ oppfx none
4173 * @ opcpuid sse
4174 * @ opgroup og_sse_mxcsrsm
4175 * @ opxcpttype 5
4176 * @ optest op1=0 -> mxcsr=0
4177 * @ optest op1=0x2083 -> mxcsr=0x2083
4178 * @ optest op1=0xfffffffe -> value.xcpt=0xd
4179 * @ optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
4180 * @ optest op1=0x2083 cr0|=em -> value.xcpt=0x6
4181 * @ optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
4182 * @ optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
4183 * @ optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
4184 * @ optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
4185 * @ optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
4186 * @ optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
4187 */
4188FNIEMOP_STUB_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm);
4189//FNIEMOP_DEF_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm)
4190//{
4191// IEMOP_MNEMONIC1(M_MEM, VLDMXCSR, vldmxcsr, MdRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4192// if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse)
4193// return IEMOP_RAISE_INVALID_OPCODE();
4194//
4195// IEM_MC_BEGIN(2, 0);
4196// IEM_MC_ARG(uint8_t, iEffSeg, 0);
4197// IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
4198// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
4199// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4200// IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4201// IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
4202// IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
4203// IEM_MC_END();
4204// return VINF_SUCCESS;
4205//}
4206
4207
4208/**
4209 * @opmaps vexgrp15
4210 * @opcode !11/3
4211 * @oppfx none
4212 * @opcpuid avx
4213 * @opgroup og_avx_mxcsrsm
4214 * @opxcpttype 5
4215 * @optest mxcsr=0 -> op1=0
4216 * @optest mxcsr=0x2083 -> op1=0x2083
4217 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
4218 * @optest !amd / mxcsr=0x2085 cr0|=em -> op1=0x2085
4219 * @optest amd / mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
4220 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
4221 * @optest mxcsr=0x2087 cr4&~=osfxsr -> op1=0x2087
4222 * @optest mxcsr=0x208f cr4&~=osxsave -> value.xcpt=0x6
4223 * @optest mxcsr=0x2087 cr4&~=osfxsr,osxsave -> value.xcpt=0x6
4224 * @optest !amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x7
4225 * @optest amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
4226 * @optest !amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> op1=0x2089
4227 * @optest amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
4228 * @optest !amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x7
4229 * @optest amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
4230 * @optest !amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x7
4231 * @optest amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
4232 * @optest !amd / mxcsr=0x208c xcr0&~=all_avx -> value.xcpt=0x6
4233 * @optest amd / mxcsr=0x208c xcr0&~=all_avx -> op1=0x208c
4234 * @optest !amd / mxcsr=0x208d xcr0&~=all_avx_sse -> value.xcpt=0x6
4235 * @optest amd / mxcsr=0x208d xcr0&~=all_avx_sse -> op1=0x208d
4236 * @optest !amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x6
4237 * @optest amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x7
4238 * @optest mxcsr=0x2082 cr0|=ts cr4&~=osxsave -> value.xcpt=0x6
4239 * @optest mxcsr=0x2081 xcr0&~=all_avx cr0|=ts cr4&~=osxsave
4240 * -> value.xcpt=0x6
4241 * @remarks The AMD Jaguar CPU (f0x16,m0,s1) raises \#UD when CR0.EM is set. It also
4242 * doesn't seem to check XCR0[2:1] != 11b. This does not match the
4243 * APMv4 rev 3.17 page 509.
4244 * @todo Test this instruction on AMD Ryzen.
4245 */
4246FNIEMOP_DEF_1(iemOp_VGrp15_vstmxcsr, uint8_t, bRm)
4247{
4248 IEMOP_MNEMONIC1(VEX_M_MEM, VSTMXCSR, vstmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
4249 IEM_MC_BEGIN(2, 0);
4250 IEM_MC_ARG(uint8_t, iEffSeg, 0);
4251 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
4252 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
4253 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
4254 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4255 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
4256 IEM_MC_CALL_CIMPL_2(iemCImpl_vstmxcsr, iEffSeg, GCPtrEff);
4257 IEM_MC_END();
4258 return VINF_SUCCESS;
4259}
4260
4261/* Opcode VEX.0F 0xae mem/4 - invalid. */
4262/* Opcode VEX.0F 0xae mem/5 - invalid. */
4263/* Opcode VEX.0F 0xae mem/6 - invalid. */
4264/* Opcode VEX.0F 0xae mem/7 - invalid. */
4265
4266/* Opcode VEX.0F 0xae 11b/0 - invalid. */
4267/* Opcode VEX.0F 0xae 11b/1 - invalid. */
4268/* Opcode VEX.0F 0xae 11b/2 - invalid. */
4269/* Opcode VEX.0F 0xae 11b/3 - invalid. */
4270/* Opcode VEX.0F 0xae 11b/4 - invalid. */
4271/* Opcode VEX.0F 0xae 11b/5 - invalid. */
4272/* Opcode VEX.0F 0xae 11b/6 - invalid. */
4273/* Opcode VEX.0F 0xae 11b/7 - invalid. */
4274
4275/**
4276 * VEX Group 15 jump table for memory variant.
4277 */
4278IEM_STATIC const PFNIEMOPRM g_apfnVexGroup15MemReg[] =
4279{ /* pfx: none, 066h, 0f3h, 0f2h */
4280 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4281 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4282 /* /2 */ iemOp_VGrp15_vldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4283 /* /3 */ iemOp_VGrp15_vstmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4284 /* /4 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4285 /* /5 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4286 /* /6 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4287 /* /7 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4288};
4289AssertCompile(RT_ELEMENTS(g_apfnVexGroup15MemReg) == 8*4);
4290
4291
4292/** Opcode VEX.0F 0xae. */
4293FNIEMOP_DEF(iemOp_VGrp15)
4294{
4295 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4296 if (IEM_IS_MODRM_REG_MODE(bRm))
4297 /* register, register */
4298 return FNIEMOP_CALL_1(iemOp_InvalidWithRM, bRm);
4299
4300 /* memory, register */
4301 return FNIEMOP_CALL_1(g_apfnVexGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
4302 + pVCpu->iem.s.idxPrefix], bRm);
4303}
4304
4305
4306/* Opcode VEX.0F 0xaf - invalid. */
4307
4308/* Opcode VEX.0F 0xb0 - invalid. */
4309/* Opcode VEX.0F 0xb1 - invalid. */
4310/* Opcode VEX.0F 0xb2 - invalid. */
4312/* Opcode VEX.0F 0xb3 - invalid. */
4313/* Opcode VEX.0F 0xb4 - invalid. */
4314/* Opcode VEX.0F 0xb5 - invalid. */
4315/* Opcode VEX.0F 0xb6 - invalid. */
4316/* Opcode VEX.0F 0xb7 - invalid. */
4317/* Opcode VEX.0F 0xb8 - invalid. */
4318/* Opcode VEX.0F 0xb9 - invalid. */
4319/* Opcode VEX.0F 0xba - invalid. */
4320/* Opcode VEX.0F 0xbb - invalid. */
4321/* Opcode VEX.0F 0xbc - invalid. */
4322/* Opcode VEX.0F 0xbd - invalid. */
4323/* Opcode VEX.0F 0xbe - invalid. */
4324/* Opcode VEX.0F 0xbf - invalid. */
4325
4326/* Opcode VEX.0F 0xc0 - invalid. */
4327/* Opcode VEX.66.0F 0xc0 - invalid. */
4328/* Opcode VEX.F3.0F 0xc0 - invalid. */
4329/* Opcode VEX.F2.0F 0xc0 - invalid. */
4330
4331/* Opcode VEX.0F 0xc1 - invalid. */
4332/* Opcode VEX.66.0F 0xc1 - invalid. */
4333/* Opcode VEX.F3.0F 0xc1 - invalid. */
4334/* Opcode VEX.F2.0F 0xc1 - invalid. */
4335
4336/** Opcode VEX.0F 0xc2 - vcmpps Vps,Hps,Wps,Ib */
4337FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
4338/** Opcode VEX.66.0F 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
4339FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
4340/** Opcode VEX.F3.0F 0xc2 - vcmpss Vss,Hss,Wss,Ib */
4341FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
4342/** Opcode VEX.F2.0F 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
4343FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);
4344
4345/* Opcode VEX.0F 0xc3 - invalid */
4346/* Opcode VEX.66.0F 0xc3 - invalid */
4347/* Opcode VEX.F3.0F 0xc3 - invalid */
4348/* Opcode VEX.F2.0F 0xc3 - invalid */
4349
4350/* Opcode VEX.0F 0xc4 - invalid */
4351
4352
4353/** Opcode VEX.66.0F 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
4354FNIEMOP_DEF(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib)
4355{
4356 /*IEMOP_MNEMONIC4(VEX_RMV, VPINSRW, vpinsrw, Vdq, Vdq, Ey, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);*/ /** @todo */
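    /* Encoding example (illustrative): c5 f1 c4 c0 03 = vpinsrw xmm0, xmm1, eax, 3. */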
4357 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4358 if (IEM_IS_MODRM_REG_MODE(bRm))
4359 {
4360 /*
4361 * Register, register.
4362 */
4363 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4364 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
4365 IEM_MC_BEGIN(4, 0);
4366 IEM_MC_ARG(PRTUINT128U, puDst, 0);
4367 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4368 IEM_MC_ARG(uint16_t, u16Src, 2);
4369 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 3);
4370 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4371 IEM_MC_PREPARE_AVX_USAGE();
4372 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4373 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4374 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_RM(pVCpu, bRm));
4375 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vpinsrw_u128, iemAImpl_vpinsrw_u128_fallback),
4376 puDst, puSrc, u16Src, bEvilArg);
4377 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
4378 IEM_MC_ADVANCE_RIP();
4379 IEM_MC_END();
4380 }
4381 else
4382 {
4383 /*
4384 * Register, memory.
4385 */
4386 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4387 IEM_MC_BEGIN(4, 1);
4388 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4389 IEM_MC_ARG(PRTUINT128U, puDst, 0);
4390 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4391 IEM_MC_ARG(uint16_t, u16Src, 2);
4392 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 3);
4393
4394 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4395 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
4396 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4397 IEM_MC_PREPARE_AVX_USAGE();
4398
4399 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4400 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4401 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4402 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vpinsrw_u128, iemAImpl_vpinsrw_u128_fallback),
4403 puDst, puSrc, u16Src, bEvilArg);
4404 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
4405
4406 IEM_MC_ADVANCE_RIP();
4407 IEM_MC_END();
4408 }
4409
4410 return VINF_SUCCESS;
4411}
4412
4413
4414/* Opcode VEX.F3.0F 0xc4 - invalid */
4415/* Opcode VEX.F2.0F 0xc4 - invalid */
4416
4417/* Opcode VEX.0F 0xc5 - invalid */
4418
4419
4420/** Opcode VEX.66.0F 0xc5 - vpextrw Gd, Udq, Ib */
4421FNIEMOP_DEF(iemOp_vpextrw_Gd_Udq_Ib)
4422{
4423 IEMOP_MNEMONIC3(VEX_RMI_REG, VPEXTRW, vpextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
4424 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4425 if (IEM_IS_MODRM_REG_MODE(bRm))
4426 {
4427 /*
4428 * Register, register.
4429 */
4430 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
4431 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
4432 IEM_MC_BEGIN(3, 1);
4433 IEM_MC_LOCAL(uint16_t, u16Dst);
4434 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Dst, 0);
4435 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4436 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
4437 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4438 IEM_MC_PREPARE_AVX_USAGE();
4439 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4440 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vpextrw_u128, iemAImpl_vpextrw_u128_fallback),
4441 pu16Dst, puSrc, bEvilArg);
4442 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u16Dst);
4443 IEM_MC_ADVANCE_RIP();
4444 IEM_MC_END();
4445 return VINF_SUCCESS;
4446 }
4447
4448 /* No memory operand. */
4449 return IEMOP_RAISE_INVALID_OPCODE();
4450}
4451
4452
4453/* Opcode VEX.F3.0F 0xc5 - invalid */
4454/* Opcode VEX.F2.0F 0xc5 - invalid */
4455
4456
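/**
 * Common body for vshufps/vshufpd (VEX(.66).0F 0xc6): decodes ModR/M and the
 * imm8 shuffle selector, then calls the 128-bit or 256-bit a_Instr worker for
 * both the register and memory source forms.
 */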
4457#define VSHUFP_X(a_Instr) \
4458 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
4459 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4460 { \
4461 /* \
4462 * Register, register. \
4463 */ \
4464 if (pVCpu->iem.s.uVexLength) \
4465 { \
4466 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil); \
4467 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2); \
4468 IEM_MC_BEGIN(4, 3); \
4469 IEM_MC_LOCAL(RTUINT256U, uDst); \
4470 IEM_MC_LOCAL(RTUINT256U, uSrc1); \
4471 IEM_MC_LOCAL(RTUINT256U, uSrc2); \
4472 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
4473 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1); \
4474 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2); \
4475 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 3); \
4476 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT(); \
4477 IEM_MC_PREPARE_AVX_USAGE(); \
4478 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4479 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4480 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
4481 iemAImpl_ ## a_Instr ## _u256_fallback), puDst, puSrc1, puSrc2, bEvilArg); \
4482 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
4483 IEM_MC_ADVANCE_RIP(); \
4484 IEM_MC_END(); \
4485 } \
4486 else \
4487 { \
4488 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil); \
4489 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
4490 IEM_MC_BEGIN(4, 0); \
4491 IEM_MC_ARG(PRTUINT128U, puDst, 0); \
4492 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1); \
4493 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2); \
4494 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 3); \
4495 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT(); \
4496 IEM_MC_PREPARE_AVX_USAGE(); \
4497 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
4498 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4499 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4500 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
4501 iemAImpl_ ## a_Instr ## _u128_fallback), puDst, puSrc1, puSrc2, bEvilArg); \
4502 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
4503 IEM_MC_ADVANCE_RIP(); \
4504 IEM_MC_END(); \
4505 } \
4506 } \
4507 else \
4508 { \
4509 /* \
4510 * Register, memory. \
4511 */ \
4512 if (pVCpu->iem.s.uVexLength) \
4513 { \
4514 IEM_MC_BEGIN(4, 4); \
4515 IEM_MC_LOCAL(RTUINT256U, uDst); \
4516 IEM_MC_LOCAL(RTUINT256U, uSrc1); \
4517 IEM_MC_LOCAL(RTUINT256U, uSrc2); \
4518 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4519 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
4520 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1); \
4521 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2); \
4522 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
4523 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil); \
4524 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 3); \
4525 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2); \
4526 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT(); \
4527 IEM_MC_PREPARE_AVX_USAGE(); \
4528 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4529 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4530 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
4531 iemAImpl_ ## a_Instr ## _u256_fallback), puDst, puSrc1, puSrc2, bEvilArg); \
4532 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
4533 IEM_MC_ADVANCE_RIP(); \
4534 IEM_MC_END(); \
4535 } \
4536 else \
4537 { \
4538 IEM_MC_BEGIN(4, 2); \
4539 IEM_MC_LOCAL(RTUINT128U, uSrc2); \
4540 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4541 IEM_MC_ARG(PRTUINT128U, puDst, 0); \
4542 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1); \
4543 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2); \
4544 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
4545 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil); \
4546 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 3); \
4547 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
4548 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT(); \
4549 IEM_MC_PREPARE_AVX_USAGE(); \
4550 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4551 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
4552 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4553 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
4554 iemAImpl_ ## a_Instr ## _u128_fallback), puDst, puSrc1, puSrc2, bEvilArg); \
4555 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
4556 IEM_MC_ADVANCE_RIP(); \
4557 IEM_MC_END(); \
4558 } \
4559 } \
4560 return VINF_SUCCESS;
4561
4562/** Opcode VEX.0F 0xc6 - vshufps Vps,Hps,Wps,Ib */
4563FNIEMOP_DEF(iemOp_vshufps_Vps_Hps_Wps_Ib)
4564{
4565 IEMOP_MNEMONIC4(VEX_RMI, VSHUFPS, vshufps, Vpd, Hpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_SKIP_PYTHON); /** @todo */
4566 VSHUFP_X(vshufps);
4567}
4568
4569
4570/** Opcode VEX.66.0F 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
4571FNIEMOP_DEF(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib)
4572{
4573 IEMOP_MNEMONIC4(VEX_RMI, VSHUFPD, vshufpd, Vpd, Hpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_SKIP_PYTHON); /** @todo */
4574 VSHUFP_X(vshufpd);
4575}
4576#undef VSHUFP_X
4577
4578
4579/* Opcode VEX.F3.0F 0xc6 - invalid */
4580/* Opcode VEX.F2.0F 0xc6 - invalid */
4581
4582/* Opcode VEX.0F 0xc7 - invalid */
4583/* Opcode VEX.66.0F 0xc7 - invalid */
4584/* Opcode VEX.F3.0F 0xc7 - invalid */
4585/* Opcode VEX.F2.0F 0xc7 - invalid */
4586
4587/* Opcode VEX.0F 0xc8 - invalid */
4588/* Opcode VEX.0F 0xc9 - invalid */
4589/* Opcode VEX.0F 0xca - invalid */
4590/* Opcode VEX.0F 0xcb - invalid */
4591/* Opcode VEX.0F 0xcc - invalid */
4592/* Opcode VEX.0F 0xcd - invalid */
4593/* Opcode VEX.0F 0xce - invalid */
4594/* Opcode VEX.0F 0xcf - invalid */
4595
4596
4597/* Opcode VEX.0F 0xd0 - invalid */
4598/** Opcode VEX.66.0F 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
4599FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd);
4600/* Opcode VEX.F3.0F 0xd0 - invalid */
4601/** Opcode VEX.F2.0F 0xd0 - vaddsubps Vps, Hps, Wps */
4602FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps);
4603
4604/* Opcode VEX.0F 0xd1 - invalid */
4605/** Opcode VEX.66.0F 0xd1 - vpsrlw Vx, Hx, W */
4606FNIEMOP_STUB(iemOp_vpsrlw_Vx_Hx_W);
4607/* Opcode VEX.F3.0F 0xd1 - invalid */
4608/* Opcode VEX.F2.0F 0xd1 - invalid */
4609
4610/* Opcode VEX.0F 0xd2 - invalid */
4611/** Opcode VEX.66.0F 0xd2 - vpsrld Vx, Hx, Wx */
4612FNIEMOP_STUB(iemOp_vpsrld_Vx_Hx_Wx);
4613/* Opcode VEX.F3.0F 0xd2 - invalid */
4614/* Opcode VEX.F2.0F 0xd2 - invalid */
4615
4616/* Opcode VEX.0F 0xd3 - invalid */
4617/** Opcode VEX.66.0F 0xd3 - vpsrlq Vx, Hx, Wx */
4618FNIEMOP_STUB(iemOp_vpsrlq_Vx_Hx_Wx);
4619/* Opcode VEX.F3.0F 0xd3 - invalid */
4620/* Opcode VEX.F2.0F 0xd3 - invalid */
4621
4622/* Opcode VEX.0F 0xd4 - invalid */
4623
4624
4625/** Opcode VEX.66.0F 0xd4 - vpaddq Vx, Hx, Wx */
4626FNIEMOP_DEF(iemOp_vpaddq_Vx_Hx_Wx)
4627{
4628 IEMOP_MNEMONIC3(VEX_RVM, VPADDQ, vpaddq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4629 IEMOPMEDIAF3_INIT_VARS( vpaddq);
4630 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4631}
4632
4633
4634/* Opcode VEX.F3.0F 0xd4 - invalid */
4635/* Opcode VEX.F2.0F 0xd4 - invalid */
4636
4637/* Opcode VEX.0F 0xd5 - invalid */
4638
4639
4640/** Opcode VEX.66.0F 0xd5 - vpmullw Vx, Hx, Wx */
4641FNIEMOP_DEF(iemOp_vpmullw_Vx_Hx_Wx)
4642{
4643 IEMOP_MNEMONIC3(VEX_RVM, VPMULLW, vpmullw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4644 IEMOPMEDIAOPTF3_INIT_VARS(vpmullw);
4645 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4646}
4647
4648
4649/* Opcode VEX.F3.0F 0xd5 - invalid */
4650/* Opcode VEX.F2.0F 0xd5 - invalid */
4651
4652/* Opcode VEX.0F 0xd6 - invalid */
4653
4654/**
4655 * @opcode 0xd6
4656 * @oppfx 0x66
4657 * @opcpuid avx
4658 * @opgroup og_avx_pcksclr_datamove
4659 * @opxcpttype none
4660 * @optest op1=-1 op2=2 -> op1=2
4661 * @optest op1=0 op2=-42 -> op1=-42
4662 */
4663FNIEMOP_DEF(iemOp_vmovq_Wq_Vq)
4664{
4665 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Wq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
4666 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4667 if (IEM_IS_MODRM_REG_MODE(bRm))
4668 {
4669 /*
4670 * Register, register.
4671 */
4672 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
4673 IEM_MC_BEGIN(0, 0);
4674
4675 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4676 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4677
4678 IEM_MC_COPY_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4679 IEM_GET_MODRM_REG(pVCpu, bRm));
4680 IEM_MC_ADVANCE_RIP();
4681 IEM_MC_END();
4682 }
4683 else
4684 {
4685 /*
4686 * Memory, register.
4687 */
4688 IEM_MC_BEGIN(0, 2);
4689 IEM_MC_LOCAL(uint64_t, uSrc);
4690 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4691
4692 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4693 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV();
4694 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4695 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4696
4697 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
4698 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4699
4700 IEM_MC_ADVANCE_RIP();
4701 IEM_MC_END();
4702 }
4703 return VINF_SUCCESS;
4704}
4705
4706/* Opcode VEX.F3.0F 0xd6 - invalid */
4707/* Opcode VEX.F2.0F 0xd6 - invalid */
4708
4709
4710/* Opcode VEX.0F 0xd7 - invalid */
4711
4712/** Opcode VEX.66.0F 0xd7 - vpmovmskb Gd, Ux */
4713FNIEMOP_DEF(iemOp_vpmovmskb_Gd_Ux)
4714{
4715 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4716 /* Docs say register only. */
4717 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
4718 {
4719 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
4720 IEMOP_MNEMONIC2(RM_REG, VPMOVMSKB, vpmovmskb, Gd, Ux, DISOPTYPE_SSE | DISOPTYPE_HARMLESS, 0);
4721 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
4722 if (pVCpu->iem.s.uVexLength)
4723 {
4724 IEM_MC_BEGIN(2, 1);
4725 IEM_MC_ARG(uint64_t *, puDst, 0);
4726 IEM_MC_LOCAL(RTUINT256U, uSrc);
4727 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
4728 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
4729 IEM_MC_PREPARE_AVX_USAGE();
4730 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4731 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4732 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpmovmskb_u256,
4733 iemAImpl_vpmovmskb_u256_fallback), puDst, puSrc);
4734 IEM_MC_ADVANCE_RIP();
4735 IEM_MC_END();
4736 }
4737 else
4738 {
4739 IEM_MC_BEGIN(2, 0);
4740 IEM_MC_ARG(uint64_t *, puDst, 0);
4741 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4742 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT();
4743 IEM_MC_PREPARE_AVX_USAGE();
4744 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4745 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4746 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
4747 IEM_MC_ADVANCE_RIP();
4748 IEM_MC_END();
4749 }
4750 return VINF_SUCCESS;
4751 }
4752 return IEMOP_RAISE_INVALID_OPCODE();
4753}
4754
4755
4756/* Opcode VEX.F3.0F 0xd7 - invalid */
4757/* Opcode VEX.F2.0F 0xd7 - invalid */
4758
4759
4760/* Opcode VEX.0F 0xd8 - invalid */
4761/** Opcode VEX.66.0F 0xd8 - vpsubusb Vx, Hx, W */
4762FNIEMOP_STUB(iemOp_vpsubusb_Vx_Hx_W);
4763/* Opcode VEX.F3.0F 0xd8 - invalid */
4764/* Opcode VEX.F2.0F 0xd8 - invalid */
4765
4766/* Opcode VEX.0F 0xd9 - invalid */
4767/** Opcode VEX.66.0F 0xd9 - vpsubusw Vx, Hx, Wx */
4768FNIEMOP_STUB(iemOp_vpsubusw_Vx_Hx_Wx);
4769/* Opcode VEX.F3.0F 0xd9 - invalid */
4770/* Opcode VEX.F2.0F 0xd9 - invalid */
4771
4772/* Opcode VEX.0F 0xda - invalid */
4773
4774
4775/** Opcode VEX.66.0F 0xda - vpminub Vx, Hx, Wx */
4776FNIEMOP_DEF(iemOp_vpminub_Vx_Hx_Wx)
4777{
4778 IEMOP_MNEMONIC3(VEX_RVM, VPMINUB, vpminub, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4779 IEMOPMEDIAF3_INIT_VARS(vpminub);
4780 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4781}
4782
4783
4784/* Opcode VEX.F3.0F 0xda - invalid */
4785/* Opcode VEX.F2.0F 0xda - invalid */
4786
4787/* Opcode VEX.0F 0xdb - invalid */
4788
4789
4790/** Opcode VEX.66.0F 0xdb - vpand Vx, Hx, Wx */
4791FNIEMOP_DEF(iemOp_vpand_Vx_Hx_Wx)
4792{
4793 IEMOP_MNEMONIC3(VEX_RVM, VPAND, vpand, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4794 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
4795 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
4796}
4797
4798
4799/* Opcode VEX.F3.0F 0xdb - invalid */
4800/* Opcode VEX.F2.0F 0xdb - invalid */
4801
4802/* Opcode VEX.0F 0xdc - invalid */
4803/** Opcode VEX.66.0F 0xdc - vpaddusb Vx, Hx, Wx */
4804FNIEMOP_STUB(iemOp_vpaddusb_Vx_Hx_Wx);
4805/* Opcode VEX.F3.0F 0xdc - invalid */
4806/* Opcode VEX.F2.0F 0xdc - invalid */
4807
4808/* Opcode VEX.0F 0xdd - invalid */
4809/** Opcode VEX.66.0F 0xdd - vpaddusw Vx, Hx, Wx */
4810FNIEMOP_STUB(iemOp_vpaddusw_Vx_Hx_Wx);
4811/* Opcode VEX.F3.0F 0xdd - invalid */
4812/* Opcode VEX.F2.0F 0xdd - invalid */
4813
4814/* Opcode VEX.0F 0xde - invalid */
4815
4816
4817/** Opcode VEX.66.0F 0xde - vpmaxub Vx, Hx, Wx */
4818FNIEMOP_DEF(iemOp_vpmaxub_Vx_Hx_Wx)
4819{
4820 IEMOP_MNEMONIC3(VEX_RVM, VPMAXUB, vpmaxub, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4821 IEMOPMEDIAF3_INIT_VARS(vpmaxub);
4822 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4823}
4824
4825
4826/* Opcode VEX.F3.0F 0xde - invalid */
4827/* Opcode VEX.F2.0F 0xde - invalid */
4828
4829/* Opcode VEX.0F 0xdf - invalid */
4830
4831
4832/** Opcode VEX.66.0F 0xdf - vpandn Vx, Hx, Wx */
4833FNIEMOP_DEF(iemOp_vpandn_Vx_Hx_Wx)
4834{
4835 IEMOP_MNEMONIC3(VEX_RVM, VPANDN, vpandn, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4836 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
4837 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
4838}
4839
4840
4841/* Opcode VEX.F3.0F 0xdf - invalid */
4842/* Opcode VEX.F2.0F 0xdf - invalid */
4843
4844/* Opcode VEX.0F 0xe0 - invalid */
4845
4846
4847/** Opcode VEX.66.0F 0xe0 - vpavgb Vx, Hx, Wx */
4848FNIEMOP_DEF(iemOp_vpavgb_Vx_Hx_Wx)
4849{
4850 IEMOP_MNEMONIC3(VEX_RVM, VPAVGB, vpavgb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
4851 IEMOPMEDIAOPTF3_INIT_VARS(vpavgb);
4852 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4853}
4854
4855
4856/* Opcode VEX.F3.0F 0xe0 - invalid */
4857/* Opcode VEX.F2.0F 0xe0 - invalid */
4858
4859/* Opcode VEX.0F 0xe1 - invalid */
4860/** Opcode VEX.66.0F 0xe1 - vpsraw Vx, Hx, W */
4861FNIEMOP_STUB(iemOp_vpsraw_Vx_Hx_W);
4862/* Opcode VEX.F3.0F 0xe1 - invalid */
4863/* Opcode VEX.F2.0F 0xe1 - invalid */
4864
4865/* Opcode VEX.0F 0xe2 - invalid */
4866/** Opcode VEX.66.0F 0xe2 - vpsrad Vx, Hx, Wx */
4867FNIEMOP_STUB(iemOp_vpsrad_Vx_Hx_Wx);
4868/* Opcode VEX.F3.0F 0xe2 - invalid */
4869/* Opcode VEX.F2.0F 0xe2 - invalid */
4870
4871/* Opcode VEX.0F 0xe3 - invalid */
4872
4873
4874/** Opcode VEX.66.0F 0xe3 - vpavgw Vx, Hx, Wx */
4875FNIEMOP_DEF(iemOp_vpavgw_Vx_Hx_Wx)
4876{
4877 IEMOP_MNEMONIC3(VEX_RVM, VPAVGW, vpavgw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
4878 IEMOPMEDIAOPTF3_INIT_VARS(vpavgw);
4879 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4880}
4881
4882
4883/* Opcode VEX.F3.0F 0xe3 - invalid */
4884/* Opcode VEX.F2.0F 0xe3 - invalid */
4885
4886/* Opcode VEX.0F 0xe4 - invalid */
4887
4888
4889/** Opcode VEX.66.0F 0xe4 - vpmulhuw Vx, Hx, Wx */
4890FNIEMOP_DEF(iemOp_vpmulhuw_Vx_Hx_Wx)
4891{
4892 IEMOP_MNEMONIC3(VEX_RVM, VPMULHUW, vpmulhuw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
4893 IEMOPMEDIAOPTF3_INIT_VARS(vpmulhuw);
4894 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4895}
4896
4897
4898/* Opcode VEX.F3.0F 0xe4 - invalid */
4899/* Opcode VEX.F2.0F 0xe4 - invalid */
4900
4901/* Opcode VEX.0F 0xe5 - invalid */
4902
4903
4904/** Opcode VEX.66.0F 0xe5 - vpmulhw Vx, Hx, Wx */
4905FNIEMOP_DEF(iemOp_vpmulhw_Vx_Hx_Wx)
4906{
4907 IEMOP_MNEMONIC3(VEX_RVM, VPMULHW, vpmulhw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, 0);
4908 IEMOPMEDIAOPTF3_INIT_VARS(vpmulhw);
4909 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4910}
4911
4912
4913/* Opcode VEX.F3.0F 0xe5 - invalid */
4914/* Opcode VEX.F2.0F 0xe5 - invalid */
4915
4916/* Opcode VEX.0F 0xe6 - invalid */
4917/** Opcode VEX.66.0F 0xe6 - vcvttpd2dq Vx, Wpd */
4918FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
4919/** Opcode VEX.F3.0F 0xe6 - vcvtdq2pd Vx, Wpd */
4920FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
4921/** Opcode VEX.F2.0F 0xe6 - vcvtpd2dq Vx, Wpd */
4922FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
4923
4924
4925/* Opcode VEX.0F 0xe7 - invalid */
4926
4927/**
4928 * @opcode 0xe7
4929 * @opcodesub !11 mr/reg
4930 * @oppfx 0x66
4931 * @opcpuid avx
4932 * @opgroup og_avx_cachect
4933 * @opxcpttype 1
4934 * @optest op1=-1 op2=2 -> op1=2
4935 * @optest op1=0 op2=-42 -> op1=-42
4936 */
4937FNIEMOP_DEF(iemOp_vmovntdq_Mx_Vx)
4938{
4939 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTDQ, vmovntdq, Mx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
4940 Assert(pVCpu->iem.s.uVexLength <= 1);
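    /* Note: the non-temporal store hint is not modelled here; the stores below
       are ordinary aligned stores. */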
4941 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4942 if (IEM_IS_MODRM_MEM_MODE(bRm))
4943 {
4944 if (pVCpu->iem.s.uVexLength == 0)
4945 {
4946 /*
4947 * 128-bit: Memory, register.
4948 */
4949 IEM_MC_BEGIN(0, 2);
4950 IEM_MC_LOCAL(RTUINT128U, uSrc);
4951 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4952
4953 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4954 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
4955 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4956 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4957
4958 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
4959 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4960
4961 IEM_MC_ADVANCE_RIP();
4962 IEM_MC_END();
4963 }
4964 else
4965 {
4966 /*
4967 * 256-bit: Memory, register.
4968 */
4969 IEM_MC_BEGIN(0, 2);
4970 IEM_MC_LOCAL(RTUINT256U, uSrc);
4971 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4972
4973 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4974 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
4975 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4976 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4977
4978 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
4979 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4980
4981 IEM_MC_ADVANCE_RIP();
4982 IEM_MC_END();
4983 }
4984 return VINF_SUCCESS;
4985 }
4986 /**
4987 * @opdone
4988 * @opmnemonic udvex660fe7reg
4989 * @opcode 0xe7
4990 * @opcodesub 11 mr/reg
4991 * @oppfx 0x66
4992 * @opunused immediate
4993 * @opcpuid avx
4994 * @optest ->
4995 */
4996 return IEMOP_RAISE_INVALID_OPCODE();
4997}
4998
4999/* Opcode VEX.F3.0F 0xe7 - invalid */
5000/* Opcode VEX.F2.0F 0xe7 - invalid */
5001
5002
5003/* Opcode VEX.0F 0xe8 - invalid */
5004/** Opcode VEX.66.0F 0xe8 - vpsubsb Vx, Hx, W */
5005FNIEMOP_STUB(iemOp_vpsubsb_Vx_Hx_W);
5006/* Opcode VEX.F3.0F 0xe8 - invalid */
5007/* Opcode VEX.F2.0F 0xe8 - invalid */
5008
5009/* Opcode VEX.0F 0xe9 - invalid */
5010/** Opcode VEX.66.0F 0xe9 - vpsubsw Vx, Hx, Wx */
5011FNIEMOP_STUB(iemOp_vpsubsw_Vx_Hx_Wx);
5012/* Opcode VEX.F3.0F 0xe9 - invalid */
5013/* Opcode VEX.F2.0F 0xe9 - invalid */
5014
5015/* Opcode VEX.0F 0xea - invalid */
5016
5017
5018/** Opcode VEX.66.0F 0xea - vpminsw Vx, Hx, Wx */
5019FNIEMOP_DEF(iemOp_vpminsw_Vx_Hx_Wx)
5020{
5021 IEMOP_MNEMONIC3(VEX_RVM, VPMINSW, vpminsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5022 IEMOPMEDIAF3_INIT_VARS(vpminsw);
5023 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5024}
5025
5026
5027/* Opcode VEX.F3.0F 0xea - invalid */
5028/* Opcode VEX.F2.0F 0xea - invalid */
5029
5030/* Opcode VEX.0F 0xeb - invalid */
5031
5032
5033/** Opcode VEX.66.0F 0xeb - vpor Vx, Hx, Wx */
5034FNIEMOP_DEF(iemOp_vpor_Vx_Hx_Wx)
5035{
5036 IEMOP_MNEMONIC3(VEX_RVM, VPOR, vpor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5037 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
5038 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
5039}
5040
5041
5042
5043/* Opcode VEX.F3.0F 0xeb - invalid */
5044/* Opcode VEX.F2.0F 0xeb - invalid */
5045
5046/* Opcode VEX.0F 0xec - invalid */
5047/** Opcode VEX.66.0F 0xec - vpaddsb Vx, Hx, Wx */
5048FNIEMOP_STUB(iemOp_vpaddsb_Vx_Hx_Wx);
5049/* Opcode VEX.F3.0F 0xec - invalid */
5050/* Opcode VEX.F2.0F 0xec - invalid */
5051
5052/* Opcode VEX.0F 0xed - invalid */
5053/** Opcode VEX.66.0F 0xed - vpaddsw Vx, Hx, Wx */
5054FNIEMOP_STUB(iemOp_vpaddsw_Vx_Hx_Wx);
5055/* Opcode VEX.F3.0F 0xed - invalid */
5056/* Opcode VEX.F2.0F 0xed - invalid */
5057
5058/* Opcode VEX.0F 0xee - invalid */
5059
5060
5061/** Opcode VEX.66.0F 0xee - vpmaxsw Vx, Hx, Wx */
5062FNIEMOP_DEF(iemOp_vpmaxsw_Vx_Hx_Wx)
5063{
5064 IEMOP_MNEMONIC3(VEX_RVM, VPMAXSW, vpmaxsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5065 IEMOPMEDIAF3_INIT_VARS(vpmaxsw);
5066 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5067}
5068
5069
5070/* Opcode VEX.F3.0F 0xee - invalid */
5071/* Opcode VEX.F2.0F 0xee - invalid */
5072
5073
5074/* Opcode VEX.0F 0xef - invalid */
5075
5076
5077/** Opcode VEX.66.0F 0xef - vpxor Vx, Hx, Wx */
5078FNIEMOP_DEF(iemOp_vpxor_Vx_Hx_Wx)
5079{
5080 IEMOP_MNEMONIC3(VEX_RVM, VPXOR, vpxor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5081 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
5082 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
5083}
5084
5085
5086/* Opcode VEX.F3.0F 0xef - invalid */
5087/* Opcode VEX.F2.0F 0xef - invalid */
5088
5089/* Opcode VEX.0F 0xf0 - invalid */
5090/* Opcode VEX.66.0F 0xf0 - invalid */
5091
5092
5093/** Opcode VEX.F2.0F 0xf0 - vlddqu Vx, Mx */
5094FNIEMOP_DEF(iemOp_vlddqu_Vx_Mx)
5095{
5096 IEMOP_MNEMONIC2(VEX_RM_MEM, VLDDQU, vlddqu, Vx_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_AVX, IEMOPHINT_IGNORES_OP_SIZES);
5097 Assert(pVCpu->iem.s.uVexLength <= 1);
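    /* Note: vlddqu has no alignment requirement, hence the plain (unaligned)
       memory fetches below. */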
5098 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5099 if (IEM_IS_MODRM_REG_MODE(bRm))
5100 {
5101 /*
5102 * Register, register - (not implemented, assuming it raises \#UD).
5103 */
5104 return IEMOP_RAISE_INVALID_OPCODE();
5105 }
5106 else if (pVCpu->iem.s.uVexLength == 0)
5107 {
5108 /*
5109 * Register, memory128.
5110 */
5111 IEM_MC_BEGIN(0, 2);
5112 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
5113 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5114
5115 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5116 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
5117 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5118 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
5119
5120 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5121 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
5122
5123 IEM_MC_ADVANCE_RIP();
5124 IEM_MC_END();
5125 }
5126 else
5127 {
5128 /*
5129 * Register, memory256.
5130 */
5131 IEM_MC_BEGIN(0, 2);
5132 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
5133 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5134
5135 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5136 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV();
5137 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5138 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
5139
5140 IEM_MC_FETCH_MEM_U256(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5141 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
5142
5143 IEM_MC_ADVANCE_RIP();
5144 IEM_MC_END();
5145 }
5146 return VINF_SUCCESS;
5147}
5148
5149
5150/* Opcode VEX.0F 0xf1 - invalid */
5151/** Opcode VEX.66.0F 0xf1 - vpsllw Vx, Hx, W */
5152FNIEMOP_STUB(iemOp_vpsllw_Vx_Hx_W);
5153/* Opcode VEX.F2.0F 0xf1 - invalid */
5154
5155/* Opcode VEX.0F 0xf2 - invalid */
5156/** Opcode VEX.66.0F 0xf2 - vpslld Vx, Hx, Wx */
5157FNIEMOP_STUB(iemOp_vpslld_Vx_Hx_Wx);
5158/* Opcode VEX.F2.0F 0xf2 - invalid */
5159
5160/* Opcode VEX.0F 0xf3 - invalid */
5161/** Opcode VEX.66.0F 0xf3 - vpsllq Vx, Hx, Wx */
5162FNIEMOP_STUB(iemOp_vpsllq_Vx_Hx_Wx);
5163/* Opcode VEX.F2.0F 0xf3 - invalid */
5164
5165/* Opcode VEX.0F 0xf4 - invalid */
5166
5167
5168/** Opcode VEX.66.0F 0xf4 - vpmuludq Vx, Hx, W */
5169FNIEMOP_DEF(iemOp_vpmuludq_Vx_Hx_W)
5170{
5171 IEMOP_MNEMONIC3(VEX_RVM, VPMULUDQ, vpmuludq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5172 IEMOPMEDIAOPTF3_INIT_VARS(vpmuludq);
5173 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5174}
5175
5176
5177/* Opcode VEX.F2.0F 0xf4 - invalid */
5178
5179/* Opcode VEX.0F 0xf5 - invalid */
5180/** Opcode VEX.66.0F 0xf5 - vpmaddwd Vx, Hx, Wx */
5181FNIEMOP_STUB(iemOp_vpmaddwd_Vx_Hx_Wx);
5182/* Opcode VEX.F2.0F 0xf5 - invalid */
5183
5184/* Opcode VEX.0F 0xf6 - invalid */
5185
5186
5187/** Opcode VEX.66.0F 0xf6 - vpsadbw Vx, Hx, Wx */
5188FNIEMOP_DEF(iemOp_vpsadbw_Vx_Hx_Wx)
5189{
5190 IEMOP_MNEMONIC3(VEX_RVM, VPSADBW, vpsadbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5191 IEMOPMEDIAOPTF3_INIT_VARS(vpsadbw);
5192 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5193}
5194
5195
5196/* Opcode VEX.F2.0F 0xf6 - invalid */
5197
5198/* Opcode VEX.0F 0xf7 - invalid */
5199/** Opcode VEX.66.0F 0xf7 - vmaskmovdqu Vdq, Udq */
5200FNIEMOP_STUB(iemOp_vmaskmovdqu_Vdq_Udq);
5201/* Opcode VEX.F2.0F 0xf7 - invalid */
5202
5203/* Opcode VEX.0F 0xf8 - invalid */
5204
5205
5206/** Opcode VEX.66.0F 0xf8 - vpsubb Vx, Hx, Wx */
5207FNIEMOP_DEF(iemOp_vpsubb_Vx_Hx_Wx)
5208{
5209 IEMOP_MNEMONIC3(VEX_RVM, VPSUBB, vpsubb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5210 IEMOPMEDIAF3_INIT_VARS( vpsubb);
5211 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5212}
5213
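/*
 * Editorial sketch, not part of the original file: the element-wise semantics
 * of VPSUBB on one 128-bit lane -- a plain wrapping byte subtract.  The
 * vpsubw/vpsubd/vpsubq workers below differ only in element width, and the
 * vpadd* family mirrors them with addition.  Helper name made up.
 */
#if 0 /* illustration only */
static void sketchSubBU128(RTUINT128U *puDst, RTUINT128U const *puSrc1, RTUINT128U const *puSrc2)
{
    for (unsigned iByte = 0; iByte < 16; iByte++)
        puDst->au8[iByte] = (uint8_t)(puSrc1->au8[iByte] - puSrc2->au8[iByte]);
}
#endif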
5214
5215/* Opcode VEX.F2.0F 0xf8 - invalid */
5216
5217/* Opcode VEX.0F 0xf9 - invalid */
5218
5219
5220/** Opcode VEX.66.0F 0xf9 - vpsubw Vx, Hx, Wx */
5221FNIEMOP_DEF(iemOp_vpsubw_Vx_Hx_Wx)
5222{
5223 IEMOP_MNEMONIC3(VEX_RVM, VPSUBW, vpsubw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5224 IEMOPMEDIAF3_INIT_VARS( vpsubw);
5225 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5226}
5227
5228
5229/* Opcode VEX.F2.0F 0xf9 - invalid */
5230
5231/* Opcode VEX.0F 0xfa - invalid */
5232
5233
5234/** Opcode VEX.66.0F 0xfa - vpsubd Vx, Hx, Wx */
5235FNIEMOP_DEF(iemOp_vpsubd_Vx_Hx_Wx)
5236{
5237 IEMOP_MNEMONIC3(VEX_RVM, VPSUBD, vpsubd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5238 IEMOPMEDIAF3_INIT_VARS( vpsubd);
5239 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5240}
5241
5242
5243/* Opcode VEX.F2.0F 0xfa - invalid */
5244
5245/* Opcode VEX.0F 0xfb - invalid */
5246
5247
5248/** Opcode VEX.66.0F 0xfb - vpsubq Vx, Hx, Wx */
5249FNIEMOP_DEF(iemOp_vpsubq_Vx_Hx_Wx)
5250{
5251 IEMOP_MNEMONIC3(VEX_RVM, VPSUBQ, vpsubq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5252 IEMOPMEDIAF3_INIT_VARS( vpsubq);
5253 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5254}
5255
5256
5257/* Opcode VEX.F2.0F 0xfb - invalid */
5258
5259/* Opcode VEX.0F 0xfc - invalid */
5260
5261
5262/** Opcode VEX.66.0F 0xfc - vpaddb Vx, Hx, Wx */
5263FNIEMOP_DEF(iemOp_vpaddb_Vx_Hx_Wx)
5264{
5265 IEMOP_MNEMONIC3(VEX_RVM, VPADDB, vpaddb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5266 IEMOPMEDIAF3_INIT_VARS( vpaddb);
5267 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5268}
5269
5270
5271/* Opcode VEX.F2.0F 0xfc - invalid */
5272
5273/* Opcode VEX.0F 0xfd - invalid */
5274
5275
5276/** Opcode VEX.66.0F 0xfd - vpaddw Vx, Hx, Wx */
5277FNIEMOP_DEF(iemOp_vpaddw_Vx_Hx_Wx)
5278{
5279 IEMOP_MNEMONIC3(VEX_RVM, VPADDW, vpaddw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5280 IEMOPMEDIAF3_INIT_VARS( vpaddw);
5281 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5282}
5283
5284
5285/* Opcode VEX.F2.0F 0xfd - invalid */
5286
5287/* Opcode VEX.0F 0xfe - invalid */
5288
5289
5290/** Opcode VEX.66.0F 0xfe - vpaddd Vx, Hx, Wx */
5291FNIEMOP_DEF(iemOp_vpaddd_Vx_Hx_Wx)
5292{
5293 IEMOP_MNEMONIC3(VEX_RVM, VPADDD, vpaddd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5294 IEMOPMEDIAF3_INIT_VARS( vpaddd);
5295 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5296}
5297
5298
5299/* Opcode VEX.F2.0F 0xfe - invalid */
5300
5301
5302/** Opcode **** 0x0f 0xff - UD0 */
5303FNIEMOP_DEF(iemOp_vud0)
5304{
5305 IEMOP_MNEMONIC(vud0, "vud0");
5306 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
5307 {
5308 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
5309#ifndef TST_IEM_CHECK_MC
5310 RTGCPTR GCPtrEff;
5311 VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
5312 if (rcStrict != VINF_SUCCESS)
5313 return rcStrict;
5314#endif
5315 IEMOP_HLP_DONE_DECODING();
5316 }
5317 return IEMOP_RAISE_INVALID_OPCODE();
5318}
5319
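/*
 * Editorial note, not part of the original file: Intel documents UD0 as
 * taking a ModR/M byte, which is why the decoder above consumes it (and any
 * SIB/displacement bytes) before raising \#UD; other vendors are assumed
 * here to treat the opcode as bare.
 */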
5320
5321
5322/**
5323 * VEX opcode map \#1.
5324 *
5325 * @sa g_apfnTwoByteMap
5326 */
5327IEM_STATIC const PFNIEMOP g_apfnVexMap1[] =
5328{
5329    /*          no prefix,           066h prefix,         f3h prefix,          f2h prefix */
5330 /* 0x00 */ IEMOP_X4(iemOp_InvalidNeedRM),
5331 /* 0x01 */ IEMOP_X4(iemOp_InvalidNeedRM),
5332 /* 0x02 */ IEMOP_X4(iemOp_InvalidNeedRM),
5333 /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRM),
5334 /* 0x04 */ IEMOP_X4(iemOp_InvalidNeedRM),
5335 /* 0x05 */ IEMOP_X4(iemOp_InvalidNeedRM),
5336 /* 0x06 */ IEMOP_X4(iemOp_InvalidNeedRM),
5337 /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRM),
5338 /* 0x08 */ IEMOP_X4(iemOp_InvalidNeedRM),
5339 /* 0x09 */ IEMOP_X4(iemOp_InvalidNeedRM),
5340 /* 0x0a */ IEMOP_X4(iemOp_InvalidNeedRM),
5341 /* 0x0b */ IEMOP_X4(iemOp_vud2), /* ?? */
5342 /* 0x0c */ IEMOP_X4(iemOp_InvalidNeedRM),
5343 /* 0x0d */ IEMOP_X4(iemOp_InvalidNeedRM),
5344 /* 0x0e */ IEMOP_X4(iemOp_InvalidNeedRM),
5345 /* 0x0f */ IEMOP_X4(iemOp_InvalidNeedRM),
5346
5347 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vss_Hss_Wss, iemOp_vmovsd_Vsd_Hsd_Wsd,
5348 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hss_Vss, iemOp_vmovsd_Wsd_Hsd_Vsd,
5349 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
5350 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5351 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5352 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5353 /* 0x16 */ iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpd_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
5354 /* 0x17 */ iemOp_vmovhps_Mq_Vq, iemOp_vmovhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5355 /* 0x18 */ IEMOP_X4(iemOp_InvalidNeedRM),
5356 /* 0x19 */ IEMOP_X4(iemOp_InvalidNeedRM),
5357 /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRM),
5358 /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),
5359 /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRM),
5360 /* 0x1d */ IEMOP_X4(iemOp_InvalidNeedRM),
5361 /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRM),
5362 /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),
5363
5364 /* 0x20 */ IEMOP_X4(iemOp_InvalidNeedRM),
5365 /* 0x21 */ IEMOP_X4(iemOp_InvalidNeedRM),
5366 /* 0x22 */ IEMOP_X4(iemOp_InvalidNeedRM),
5367 /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRM),
5368 /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRM),
5369 /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRM),
5370 /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),
5371 /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),
5372 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5373 /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5374 /* 0x2a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
5375 /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5376 /* 0x2c */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
5377 /* 0x2d */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
5378 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5379 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5380
5381 /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRM),
5382 /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRM),
5383 /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRM),
5384 /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRM),
5385 /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRM),
5386 /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRM),
5387 /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRM),
5388 /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRM),
5389 /* 0x38 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5390 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5391 /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5392 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5393 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5394 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5395 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5396 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5397
5398 /* 0x40 */ IEMOP_X4(iemOp_InvalidNeedRM),
5399 /* 0x41 */ IEMOP_X4(iemOp_InvalidNeedRM),
5400 /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),
5401 /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),
5402 /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),
5403 /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRM),
5404 /* 0x46 */ IEMOP_X4(iemOp_InvalidNeedRM),
5405 /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRM),
5406 /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),
5407 /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),
5408 /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),
5409 /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),
5410 /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),
5411 /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),
5412 /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),
5413 /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),
5414
5415 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5416 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
5417 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
5418 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
5419 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5420 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5421 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5422 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5423 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
5424 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
5425 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
5426 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
5427 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
5428 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
5429 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
5430 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
5431
5432 /* 0x60 */ iemOp_InvalidNeedRM, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5433 /* 0x61 */ iemOp_InvalidNeedRM, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5434 /* 0x62 */ iemOp_InvalidNeedRM, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5435 /* 0x63 */ iemOp_InvalidNeedRM, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5436 /* 0x64 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5437 /* 0x65 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5438 /* 0x66 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5439 /* 0x67 */ iemOp_InvalidNeedRM, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5440 /* 0x68 */ iemOp_InvalidNeedRM, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5441 /* 0x69 */ iemOp_InvalidNeedRM, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5442 /* 0x6a */ iemOp_InvalidNeedRM, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5443 /* 0x6b */ iemOp_InvalidNeedRM, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5444 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5445 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5446 /* 0x6e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5447 /* 0x6f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,
5448
5449 /* 0x70 */ iemOp_InvalidNeedRM, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
5450 /* 0x71 */ iemOp_InvalidNeedRM, iemOp_VGrp12, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5451 /* 0x72 */ iemOp_InvalidNeedRM, iemOp_VGrp13, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5452 /* 0x73 */ iemOp_InvalidNeedRM, iemOp_VGrp14, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5453 /* 0x74 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5454 /* 0x75 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5455 /* 0x76 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5456 /* 0x77 */ iemOp_vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5457 /* 0x78 */ IEMOP_X4(iemOp_InvalidNeedRM),
5458 /* 0x79 */ IEMOP_X4(iemOp_InvalidNeedRM),
5459 /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),
5460 /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),
5461 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
5462 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
5463 /* 0x7e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
5464 /* 0x7f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,
5465
5466 /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRM),
5467 /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRM),
5468 /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRM),
5469 /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRM),
5470 /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),
5471 /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),
5472 /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),
5473 /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),
5474 /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),
5475 /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),
5476 /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),
5477 /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),
5478 /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRM),
5479 /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),
5480 /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRM),
5481 /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),
5482
5483 /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRM),
5484 /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRM),
5485 /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRM),
5486 /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRM),
5487 /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRM),
5488 /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRM),
5489 /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRM),
5490 /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRM),
5491 /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRM),
5492 /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRM),
5493 /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRM),
5494 /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRM),
5495 /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRM),
5496 /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRM),
5497 /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRM),
5498 /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRM),
5499
5500 /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRM),
5501 /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRM),
5502 /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRM),
5503 /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRM),
5504 /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRM),
5505 /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRM),
5506 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
5507 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
5508 /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRM),
5509 /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRM),
5510 /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRM),
5511 /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRM),
5512 /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRM),
5513 /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRM),
5514 /* 0xae */ IEMOP_X4(iemOp_VGrp15),
5515 /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRM),
5516
5517 /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRM),
5518 /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRM),
5519 /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRM),
5520 /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRM),
5521 /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRM),
5522 /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRM),
5523 /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRM),
5524 /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRM),
5525 /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRM),
5526 /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRM),
5527 /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRM),
5528 /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRM),
5529 /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRM),
5530 /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRM),
5531 /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRM),
5532 /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRM),
5533
5534 /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRM),
5535 /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRM),
5536 /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
5537 /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRM),
5538 /* 0xc4 */ iemOp_InvalidNeedRM, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
5539 /* 0xc5 */ iemOp_InvalidNeedRM, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
5540    /* 0xc6 */  iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
5541 /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRM),
5542 /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRM),
5543 /* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRM),
5544 /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRM),
5545 /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRM),
5546 /* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRM),
5547 /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRM),
5548 /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRM),
5549 /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRM),
5550
5551 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
5552 /* 0xd1 */ iemOp_InvalidNeedRM, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5553 /* 0xd2 */ iemOp_InvalidNeedRM, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5554 /* 0xd3 */ iemOp_InvalidNeedRM, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5555 /* 0xd4 */ iemOp_InvalidNeedRM, iemOp_vpaddq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5556 /* 0xd5 */ iemOp_InvalidNeedRM, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5557 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5558 /* 0xd7 */ iemOp_InvalidNeedRM, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5559 /* 0xd8 */ iemOp_InvalidNeedRM, iemOp_vpsubusb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5560 /* 0xd9 */ iemOp_InvalidNeedRM, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5561 /* 0xda */ iemOp_InvalidNeedRM, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5562 /* 0xdb */ iemOp_InvalidNeedRM, iemOp_vpand_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5563 /* 0xdc */ iemOp_InvalidNeedRM, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5564 /* 0xdd */ iemOp_InvalidNeedRM, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5565 /* 0xde */ iemOp_InvalidNeedRM, iemOp_vpmaxub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5566 /* 0xdf */ iemOp_InvalidNeedRM, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5567
5568 /* 0xe0 */ iemOp_InvalidNeedRM, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5569 /* 0xe1 */ iemOp_InvalidNeedRM, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5570 /* 0xe2 */ iemOp_InvalidNeedRM, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5571 /* 0xe3 */ iemOp_InvalidNeedRM, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5572 /* 0xe4 */ iemOp_InvalidNeedRM, iemOp_vpmulhuw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5573 /* 0xe5 */ iemOp_InvalidNeedRM, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5574 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
5575 /* 0xe7 */ iemOp_InvalidNeedRM, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5576 /* 0xe8 */ iemOp_InvalidNeedRM, iemOp_vpsubsb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5577 /* 0xe9 */ iemOp_InvalidNeedRM, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5578 /* 0xea */ iemOp_InvalidNeedRM, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5579 /* 0xeb */ iemOp_InvalidNeedRM, iemOp_vpor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5580 /* 0xec */ iemOp_InvalidNeedRM, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5581 /* 0xed */ iemOp_InvalidNeedRM, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5582 /* 0xee */ iemOp_InvalidNeedRM, iemOp_vpmaxsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5583 /* 0xef */ iemOp_InvalidNeedRM, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5584
5585 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
5586 /* 0xf1 */ iemOp_InvalidNeedRM, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5587 /* 0xf2 */ iemOp_InvalidNeedRM, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5588 /* 0xf3 */ iemOp_InvalidNeedRM, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5589 /* 0xf4 */ iemOp_InvalidNeedRM, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5590 /* 0xf5 */ iemOp_InvalidNeedRM, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5591 /* 0xf6 */ iemOp_InvalidNeedRM, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5592 /* 0xf7 */ iemOp_InvalidNeedRM, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5593 /* 0xf8 */ iemOp_InvalidNeedRM, iemOp_vpsubb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5594 /* 0xf9 */ iemOp_InvalidNeedRM, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5595 /* 0xfa */ iemOp_InvalidNeedRM, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5596 /* 0xfb */ iemOp_InvalidNeedRM, iemOp_vpsubq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5597 /* 0xfc */ iemOp_InvalidNeedRM, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5598 /* 0xfd */ iemOp_InvalidNeedRM, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5599 /* 0xfe */ iemOp_InvalidNeedRM, iemOp_vpaddd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5600 /* 0xff */ IEMOP_X4(iemOp_vud0) /* ?? */
5601};
5602AssertCompile(RT_ELEMENTS(g_apfnVexMap1) == 1024);
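
/*
 * Editorial sketch, not part of the original file: the table holds four
 * entries per opcode byte, one per SIMD prefix (none, 066h, 0f3h, 0f2h), so
 * a decoder would be expected to index it roughly as below.  The idxPrefix
 * field name is an assumption based on the surrounding IEM code.
 */
#if 0 /* illustration only */
    uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
    return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#endif
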
5603/** @} */
5604