1/* $Id: IEMAllInstVexMap1.cpp.h 105184 2024-07-08 12:27:15Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 *
5 * @remarks IEMAllInstTwoByte0f.cpp.h is a legacy mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
11 *
12 * This file is part of VirtualBox base platform packages, as
13 * available from https://www.virtualbox.org.
14 *
15 * This program is free software; you can redistribute it and/or
16 * modify it under the terms of the GNU General Public License
17 * as published by the Free Software Foundation, in version 3 of the
18 * License.
19 *
20 * This program is distributed in the hope that it will be useful, but
21 * WITHOUT ANY WARRANTY; without even the implied warranty of
22 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
23 * General Public License for more details.
24 *
25 * You should have received a copy of the GNU General Public License
26 * along with this program; if not, see <https://www.gnu.org/licenses>.
27 *
28 * SPDX-License-Identifier: GPL-3.0-only
29 */
30
31
32/** @name VEX Opcode Map 1
33 * @{
34 */
35
36/**
37 * Common worker for AVX/AVX2 instructions on the forms:
38 * - vpxxx xmm0, xmm1, xmm2/mem128
39 * - vpxxx ymm0, ymm1, ymm2/mem256
40 *
41 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
42 */
43FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, PCIEMOPMEDIAF3, pImpl)
44{
45 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
46 if (IEM_IS_MODRM_REG_MODE(bRm))
47 {
48 /*
49 * Register, register.
50 */
51 if (pVCpu->iem.s.uVexLength)
52 {
53 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
54 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
55 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
56 IEM_MC_PREPARE_AVX_USAGE();
57
58 IEM_MC_LOCAL(X86YMMREG, uSrc1);
59 IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc1, uSrc1, 1);
60 IEM_MC_FETCH_YREG_YMM(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
61
62 IEM_MC_LOCAL(X86YMMREG, uSrc2);
63 IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc2, uSrc2, 2);
64 IEM_MC_FETCH_YREG_YMM(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
65
66 IEM_MC_LOCAL(X86YMMREG, uDst);
67 IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
68
69 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
70
71 IEM_MC_STORE_YREG_YMM_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
72 IEM_MC_ADVANCE_RIP_AND_FINISH();
73 IEM_MC_END();
74 }
75 else
76 {
77 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
78 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
79 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
80 IEM_MC_PREPARE_AVX_USAGE();
81
82 IEM_MC_ARG(PX86XMMREG, puDst, 0);
83 IEM_MC_REF_XREG_XMM(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
84 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
85 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
86 IEM_MC_ARG(PCX86XMMREG, puSrc2, 2);
87 IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
88 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
89 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
90 IEM_MC_ADVANCE_RIP_AND_FINISH();
91 IEM_MC_END();
92 }
93 }
94 else
95 {
96 /*
97 * Register, memory.
98 */
99 if (pVCpu->iem.s.uVexLength)
100 {
101 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
102 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
103 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
104 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
105 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
106 IEM_MC_PREPARE_AVX_USAGE();
107
108 IEM_MC_LOCAL(X86YMMREG, uSrc2);
109 IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc2, uSrc2, 2);
110 IEM_MC_FETCH_MEM_YMM_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
111
112 IEM_MC_LOCAL(X86YMMREG, uSrc1);
113 IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc1, uSrc1, 1);
114 IEM_MC_FETCH_YREG_YMM(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
115
116 IEM_MC_LOCAL(X86YMMREG, uDst);
117 IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
118
119 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
120
121 IEM_MC_STORE_YREG_YMM_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
122 IEM_MC_ADVANCE_RIP_AND_FINISH();
123 IEM_MC_END();
124 }
125 else
126 {
127 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
128 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
129 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
130 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
131 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
132 IEM_MC_PREPARE_AVX_USAGE();
133
134 IEM_MC_LOCAL(X86XMMREG, uSrc2);
135 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 2);
136 IEM_MC_FETCH_MEM_XMM_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
137
138 IEM_MC_ARG(PX86XMMREG, puDst, 0);
139 IEM_MC_REF_XREG_XMM(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
140 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
141 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
142
143 IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
144 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
145
146 IEM_MC_ADVANCE_RIP_AND_FINISH();
147 IEM_MC_END();
148 }
149 }
150}
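/*
 * Illustrative sketch (kept out of the build): the rough shape of a 128-bit
 * arithmetic helper such a worker invokes through pImpl->pfnU128.  The names
 * MYSKETCHXMM and mySketchAddPs are hypothetical stand-ins for the real
 * iemAImpl_* helpers; MXCSR handling and FP exception reporting are omitted.
 */
#if 0
typedef union MYSKETCHXMM
{
    float    ar32[4];
    uint32_t au32[4];
} MYSKETCHXMM;

static void mySketchAddPs(MYSKETCHXMM *puDst, MYSKETCHXMM const *puSrc1, MYSKETCHXMM const *puSrc2)
{
    for (unsigned i = 0; i < 4; i++)  /* lane-wise single-precision add, as for vaddps */
        puDst->ar32[i] = puSrc1->ar32[i] + puSrc2->ar32[i];
}
#endif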
151
152
153/**
154 * Common worker for AVX/AVX2 instructions on the forms:
155 * - vpxxx xmm0, xmm1, xmm2/mem128
156 * - vpxxx ymm0, ymm1, ymm2/mem256
157 *
158 * Takes a function table for functions without an implicit state parameter.
159 *
160 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
161 */
162FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, PCIEMOPMEDIAOPTF3, pImpl)
163{
164 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
165 if (IEM_IS_MODRM_REG_MODE(bRm))
166 {
167 /*
168 * Register, register.
169 */
170 if (pVCpu->iem.s.uVexLength)
171 {
172 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
173 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
174 IEM_MC_LOCAL(RTUINT256U, uDst);
175 IEM_MC_LOCAL(RTUINT256U, uSrc1);
176 IEM_MC_LOCAL(RTUINT256U, uSrc2);
177 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
178 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
179 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
180 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
181 IEM_MC_PREPARE_AVX_USAGE();
182 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
183 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
184 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
185 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
186 IEM_MC_ADVANCE_RIP_AND_FINISH();
187 IEM_MC_END();
188 }
189 else
190 {
191 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
192 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
193 IEM_MC_ARG(PRTUINT128U, puDst, 0);
194 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
195 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
196 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
197 IEM_MC_PREPARE_AVX_USAGE();
198 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
199 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
200 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
201 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
202 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
203 IEM_MC_ADVANCE_RIP_AND_FINISH();
204 IEM_MC_END();
205 }
206 }
207 else
208 {
209 /*
210 * Register, memory.
211 */
212 if (pVCpu->iem.s.uVexLength)
213 {
214 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
215 IEM_MC_LOCAL(RTUINT256U, uDst);
216 IEM_MC_LOCAL(RTUINT256U, uSrc1);
217 IEM_MC_LOCAL(RTUINT256U, uSrc2);
218 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
219 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
220 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
221 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
222
223 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
224 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
225 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
226 IEM_MC_PREPARE_AVX_USAGE();
227
228 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
229 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
230 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
231 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
232
233 IEM_MC_ADVANCE_RIP_AND_FINISH();
234 IEM_MC_END();
235 }
236 else
237 {
238 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
239 IEM_MC_LOCAL(RTUINT128U, uSrc2);
240 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
241 IEM_MC_ARG(PRTUINT128U, puDst, 0);
242 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
243 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);
244
245 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
246 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
247 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
248 IEM_MC_PREPARE_AVX_USAGE();
249
250 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
251 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
252 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
253 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
254 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
255
256 IEM_MC_ADVANCE_RIP_AND_FINISH();
257 IEM_MC_END();
258 }
259 }
260}
261
262
263/**
264 * Common worker for AVX/AVX2 instructions on the forms:
265 * - vpunpckhxx xmm0, xmm1, xmm2/mem128
266 * - vpunpckhxx ymm0, ymm1, ymm2/mem256
267 *
268 * The 128-bit memory version of this instruction may elect to skip fetching the
269 * lower 64 bits of the operand. We, however, do not.
270 *
271 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
272 */
273FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, PCIEMOPMEDIAOPTF3, pImpl)
274{
275 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, pImpl);
276}
277
278
279/**
280 * Common worker for AVX/AVX2 instructions on the forms:
281 * - vpunpcklxx xmm0, xmm1, xmm2/mem128
282 * - vpunpcklxx ymm0, ymm1, ymm2/mem256
283 *
284 * The 128-bit memory version of this instruction may elect to skip fetching the
285 * higher 64 bits of the operand. We, however, do not.
286 *
287 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
288 */
289FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, PCIEMOPMEDIAOPTF3, pImpl)
290{
291 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, pImpl);
292}
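/*
 * Illustrative sketch (kept out of the build): 128-bit vpunpckldq semantics
 * as routed through the _LowSrc worker above -- the low dwords of the two
 * sources are interleaved, which is why the high 64 bits of a memory operand
 * could in principle be skipped.  mySketchPunpckLdq is a hypothetical name;
 * the buffers are assumed not to overlap.
 */
#if 0
static void mySketchPunpckLdq(uint32_t auDst[4], uint32_t const auSrc1[4], uint32_t const auSrc2[4])
{
    auDst[0] = auSrc1[0];   /* dst[0] = src1 dword 0 */
    auDst[1] = auSrc2[0];   /* dst[1] = src2 dword 0 */
    auDst[2] = auSrc1[1];   /* dst[2] = src1 dword 1 */
    auDst[3] = auSrc2[1];   /* dst[3] = src2 dword 1 */
}
#endif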
293
294
295/**
296 * Common worker for AVX/AVX2 instructions on the forms:
297 * - vpxxx xmm0, xmm1/mem128
298 * - vpxxx ymm0, ymm1/mem256
299 *
300 * Takes a function table for functions without an implicit state parameter.
301 *
302 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
303 */
304FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Wx_Opt, PCIEMOPMEDIAOPTF2, pImpl)
305{
306 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
307 if (IEM_IS_MODRM_REG_MODE(bRm))
308 {
309 /*
310 * Register, register.
311 */
312 if (pVCpu->iem.s.uVexLength)
313 {
314 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
315 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
316 IEM_MC_LOCAL(RTUINT256U, uDst);
317 IEM_MC_LOCAL(RTUINT256U, uSrc);
318 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
319 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
320 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
321 IEM_MC_PREPARE_AVX_USAGE();
322 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
323 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
324 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
325 IEM_MC_ADVANCE_RIP_AND_FINISH();
326 IEM_MC_END();
327 }
328 else
329 {
330 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
331 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
332 IEM_MC_ARG(PRTUINT128U, puDst, 0);
333 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
334 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
335 IEM_MC_PREPARE_AVX_USAGE();
336 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
337 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
338 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
339 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
340 IEM_MC_ADVANCE_RIP_AND_FINISH();
341 IEM_MC_END();
342 }
343 }
344 else
345 {
346 /*
347 * Register, memory.
348 */
349 if (pVCpu->iem.s.uVexLength)
350 {
351 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
352 IEM_MC_LOCAL(RTUINT256U, uDst);
353 IEM_MC_LOCAL(RTUINT256U, uSrc);
354 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
355 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
356 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
357
358 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
359 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
360 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
361 IEM_MC_PREPARE_AVX_USAGE();
362
363 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
364 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
365 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
366
367 IEM_MC_ADVANCE_RIP_AND_FINISH();
368 IEM_MC_END();
369 }
370 else
371 {
372 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
373 IEM_MC_LOCAL(RTUINT128U, uSrc);
374 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
375 IEM_MC_ARG(PRTUINT128U, puDst, 0);
376 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
377
378 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
379 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
380 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
381 IEM_MC_PREPARE_AVX_USAGE();
382
383 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
384 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
385 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
386 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
387
388 IEM_MC_ADVANCE_RIP_AND_FINISH();
389 IEM_MC_END();
390 }
391 }
392}
393
394
395/* Opcode VEX.0F 0x00 - invalid */
396/* Opcode VEX.0F 0x01 - invalid */
397/* Opcode VEX.0F 0x02 - invalid */
398/* Opcode VEX.0F 0x03 - invalid */
399/* Opcode VEX.0F 0x04 - invalid */
400/* Opcode VEX.0F 0x05 - invalid */
401/* Opcode VEX.0F 0x06 - invalid */
402/* Opcode VEX.0F 0x07 - invalid */
403/* Opcode VEX.0F 0x08 - invalid */
404/* Opcode VEX.0F 0x09 - invalid */
405/* Opcode VEX.0F 0x0a - invalid */
406
407/** Opcode VEX.0F 0x0b. */
408FNIEMOP_DEF(iemOp_vud2)
409{
410 IEMOP_MNEMONIC(vud2, "vud2");
411 IEMOP_RAISE_INVALID_OPCODE_RET();
412}
413
414/* Opcode VEX.0F 0x0c - invalid */
415/* Opcode VEX.0F 0x0d - invalid */
416/* Opcode VEX.0F 0x0e - invalid */
417/* Opcode VEX.0F 0x0f - invalid */
418
419
420/**
421 * @opcode 0x10
422 * @oppfx none
423 * @opcpuid avx
424 * @opgroup og_avx_simdfp_datamove
425 * @opxcpttype 4UA
426 * @optest op1=1 op2=2 -> op1=2
427 * @optest op1=0 op2=-22 -> op1=-22
428 */
429FNIEMOP_DEF(iemOp_vmovups_Vps_Wps)
430{
431 IEMOP_MNEMONIC2(VEX_RM, VMOVUPS, vmovups, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
432 Assert(pVCpu->iem.s.uVexLength <= 1);
433 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
434 if (IEM_IS_MODRM_REG_MODE(bRm))
435 {
436 /*
437 * Register, register.
438 */
439 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
440 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
441 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
442 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
443 if (pVCpu->iem.s.uVexLength == 0)
444 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
445 IEM_GET_MODRM_RM(pVCpu, bRm));
446 else
447 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
448 IEM_GET_MODRM_RM(pVCpu, bRm));
449 IEM_MC_ADVANCE_RIP_AND_FINISH();
450 IEM_MC_END();
451 }
452 else if (pVCpu->iem.s.uVexLength == 0)
453 {
454 /*
455 * 128-bit: Register, Memory
456 */
457 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
458 IEM_MC_LOCAL(RTUINT128U, uSrc);
459 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
460
461 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
462 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
463 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
464 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
465
466 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
467 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
468
469 IEM_MC_ADVANCE_RIP_AND_FINISH();
470 IEM_MC_END();
471 }
472 else
473 {
474 /*
475 * 256-bit: Register, Memory
476 */
477 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
478 IEM_MC_LOCAL(RTUINT256U, uSrc);
479 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
480
481 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
482 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
483 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
484 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
485
486 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
487 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
488
489 IEM_MC_ADVANCE_RIP_AND_FINISH();
490 IEM_MC_END();
491 }
492}
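/*
 * Illustrative sketch (kept out of the build): what the _ZX_VLMAX stores used
 * above amount to.  A VEX.128 write replaces the low 128 bits and zeroes
 * bits 255:128 of the destination YMM register, unlike a legacy SSE store
 * which would leave them untouched.  The function name is hypothetical.
 */
#if 0
static void mySketchStoreU128ZxVlMax(uint64_t au64Ymm[4], uint64_t const au64Src[2])
{
    au64Ymm[0] = au64Src[0];
    au64Ymm[1] = au64Src[1];
    au64Ymm[2] = 0;             /* bits 191:128 cleared */
    au64Ymm[3] = 0;             /* bits 255:192 cleared */
}
#endif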
493
494
495/**
496 * @opcode 0x10
497 * @oppfx 0x66
498 * @opcpuid avx
499 * @opgroup og_avx_simdfp_datamove
500 * @opxcpttype 4UA
501 * @optest op1=1 op2=2 -> op1=2
502 * @optest op1=0 op2=-22 -> op1=-22
503 */
504FNIEMOP_DEF(iemOp_vmovupd_Vpd_Wpd)
505{
506 IEMOP_MNEMONIC2(VEX_RM, VMOVUPD, vmovupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
507 Assert(pVCpu->iem.s.uVexLength <= 1);
508 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
509 if (IEM_IS_MODRM_REG_MODE(bRm))
510 {
511 /*
512 * Register, register.
513 */
514 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
515 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
516 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
517 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
518 if (pVCpu->iem.s.uVexLength == 0)
519 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
520 IEM_GET_MODRM_RM(pVCpu, bRm));
521 else
522 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
523 IEM_GET_MODRM_RM(pVCpu, bRm));
524 IEM_MC_ADVANCE_RIP_AND_FINISH();
525 IEM_MC_END();
526 }
527 else if (pVCpu->iem.s.uVexLength == 0)
528 {
529 /*
530 * 128-bit: Memory, register.
531 */
532 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
533 IEM_MC_LOCAL(RTUINT128U, uSrc);
534 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
535
536 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
537 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
538 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
539 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
540
541 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
542 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
543
544 IEM_MC_ADVANCE_RIP_AND_FINISH();
545 IEM_MC_END();
546 }
547 else
548 {
549 /*
550 * 256-bit: Memory, register.
551 */
552 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
553 IEM_MC_LOCAL(RTUINT256U, uSrc);
554 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
555
556 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
557 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
558 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
559 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
560
561 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
562 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
563
564 IEM_MC_ADVANCE_RIP_AND_FINISH();
565 IEM_MC_END();
566 }
567}
568
569
570FNIEMOP_DEF(iemOp_vmovss_Vss_Hss_Wss)
571{
572 Assert(pVCpu->iem.s.uVexLength <= 1);
573 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
574 if (IEM_IS_MODRM_REG_MODE(bRm))
575 {
576 /**
577 * @opcode 0x10
578 * @oppfx 0xf3
579 * @opcodesub 11 mr/reg
580 * @opcpuid avx
581 * @opgroup og_avx_simdfp_datamerge
582 * @opxcpttype 5
583 * @optest op1=1 op2=0 op3=2 -> op1=2
584 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffea
585 * @optest op1=3 op2=-1 op3=0x77 -> op1=-4294967177
586 * @optest op1=3 op2=-2 op3=0x77 -> op1=-8589934473
587 * @note HssHi refers to bits 127:32.
588 */
589 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSS, vmovss, Vss_WO, HssHi, Uss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
590 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
591 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
592 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
593 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
594 IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
595 IEM_GET_MODRM_RM(pVCpu, bRm) /*U32*/,
596 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
597 IEM_MC_ADVANCE_RIP_AND_FINISH();
598 IEM_MC_END();
599 }
600 else
601 {
602 /**
603 * @opdone
604 * @opcode 0x10
605 * @oppfx 0xf3
606 * @opcodesub !11 mr/reg
607 * @opcpuid avx
608 * @opgroup og_avx_simdfp_datamove
609 * @opxcpttype 5
610 * @opfunction iemOp_vmovss_Vss_Hss_Wss
611 * @optest op1=1 op2=2 -> op1=2
612 * @optest op1=0 op2=-22 -> op1=-22
613 */
614 IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSS, vmovss, VssZx_WO, Md, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
615 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
616 IEM_MC_LOCAL(uint32_t, uSrc);
617 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
618
619 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
620 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
621 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
622 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
623
624 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
625 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
626
627 IEM_MC_ADVANCE_RIP_AND_FINISH();
628 IEM_MC_END();
629 }
630}
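/*
 * Illustrative sketch (kept out of the build): the register form of vmovss
 * merges bits 31:0 of the r/m register with bits 127:32 of the VVVV register
 * and zero-extends to the full vector width, matching the
 * IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX invocation above.  Hypothetical names.
 */
#if 0
static void mySketchMovssRegMerge(uint32_t auDst[8], uint32_t const auUss[4], uint32_t const auHss[4])
{
    auDst[0] = auUss[0];                        /* bits  31:0   from Uss */
    auDst[1] = auHss[1];                        /* bits 127:32  from Hss */
    auDst[2] = auHss[2];
    auDst[3] = auHss[3];
    for (unsigned i = 4; i < 8; i++)
        auDst[i] = 0;                           /* bits 255:128 zeroed   */
}
#endif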
631
632
633FNIEMOP_DEF(iemOp_vmovsd_Vsd_Hsd_Wsd)
634{
635 Assert(pVCpu->iem.s.uVexLength <= 1);
636 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
637 if (IEM_IS_MODRM_REG_MODE(bRm))
638 {
639 /**
640 * @opcode 0x10
641 * @oppfx 0xf2
642 * @opcodesub 11 mr/reg
643 * @opcpuid avx
644 * @opgroup og_avx_simdfp_datamerge
645 * @opxcpttype 5
646 * @optest op1=1 op2=0 op3=2 -> op1=2
647 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
648 * @optest op1=3 op2=-1 op3=0x77 ->
649 * op1=0xffffffffffffffff0000000000000077
650 * @optest op1=3 op2=0x42 op3=0x77 -> op1=0x420000000000000077
651 */
652 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSD, vmovsd, Vsd_WO, HsdHi, Usd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
653 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
654 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
655
656 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
657 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
658 IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
659 IEM_GET_MODRM_RM(pVCpu, bRm) /*U64*/,
660 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
661 IEM_MC_ADVANCE_RIP_AND_FINISH();
662 IEM_MC_END();
663 }
664 else
665 {
666 /**
667 * @opdone
668 * @opcode 0x10
669 * @oppfx 0xf2
670 * @opcodesub !11 mr/reg
671 * @opcpuid avx
672 * @opgroup og_avx_simdfp_datamove
673 * @opxcpttype 5
674 * @opfunction iemOp_vmovsd_Vsd_Hsd_Wsd
675 * @optest op1=1 op2=2 -> op1=2
676 * @optest op1=0 op2=-22 -> op1=-22
677 */
678 IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSD, vmovsd, VsdZx_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
679 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
680 IEM_MC_LOCAL(uint64_t, uSrc);
681 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
682
683 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
684 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
685 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
686 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
687
688 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
689 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
690
691 IEM_MC_ADVANCE_RIP_AND_FINISH();
692 IEM_MC_END();
693 }
694}
695
696
697/**
698 * @opcode 0x11
699 * @oppfx none
700 * @opcpuid avx
701 * @opgroup og_avx_simdfp_datamove
702 * @opxcpttype 4UA
703 * @optest op1=1 op2=2 -> op1=2
704 * @optest op1=0 op2=-22 -> op1=-22
705 */
706FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
707{
708 IEMOP_MNEMONIC2(VEX_MR, VMOVUPS, vmovups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
709 Assert(pVCpu->iem.s.uVexLength <= 1);
710 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
711 if (IEM_IS_MODRM_REG_MODE(bRm))
712 {
713 /*
714 * Register, register.
715 */
716 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
717 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
718 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
719 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
720 if (pVCpu->iem.s.uVexLength == 0)
721 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
722 IEM_GET_MODRM_REG(pVCpu, bRm));
723 else
724 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
725 IEM_GET_MODRM_REG(pVCpu, bRm));
726 IEM_MC_ADVANCE_RIP_AND_FINISH();
727 IEM_MC_END();
728 }
729 else if (pVCpu->iem.s.uVexLength == 0)
730 {
731 /*
732 * 128-bit: Memory, register.
733 */
734 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
735 IEM_MC_LOCAL(RTUINT128U, uSrc);
736 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
737
738 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
739 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
740 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
741 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
742
743 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
744 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
745
746 IEM_MC_ADVANCE_RIP_AND_FINISH();
747 IEM_MC_END();
748 }
749 else
750 {
751 /*
752 * 256-bit: Memory, register.
753 */
754 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
755 IEM_MC_LOCAL(RTUINT256U, uSrc);
756 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
757
758 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
759 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
760 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
761 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
762
763 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
764 IEM_MC_STORE_MEM_U256_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
765
766 IEM_MC_ADVANCE_RIP_AND_FINISH();
767 IEM_MC_END();
768 }
769}
770
771
772/**
773 * @opcode 0x11
774 * @oppfx 0x66
775 * @opcpuid avx
776 * @opgroup og_avx_simdfp_datamove
777 * @opxcpttype 4UA
778 * @optest op1=1 op2=2 -> op1=2
779 * @optest op1=0 op2=-22 -> op1=-22
780 */
781FNIEMOP_DEF(iemOp_vmovupd_Wpd_Vpd)
782{
783 IEMOP_MNEMONIC2(VEX_MR, VMOVUPD, vmovupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
784 Assert(pVCpu->iem.s.uVexLength <= 1);
785 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
786 if (IEM_IS_MODRM_REG_MODE(bRm))
787 {
788 /*
789 * Register, register.
790 */
791 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
792 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
793 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
794 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
795 if (pVCpu->iem.s.uVexLength == 0)
796 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
797 IEM_GET_MODRM_REG(pVCpu, bRm));
798 else
799 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
800 IEM_GET_MODRM_REG(pVCpu, bRm));
801 IEM_MC_ADVANCE_RIP_AND_FINISH();
802 IEM_MC_END();
803 }
804 else if (pVCpu->iem.s.uVexLength == 0)
805 {
806 /*
807 * 128-bit: Memory, register.
808 */
809 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
810 IEM_MC_LOCAL(RTUINT128U, uSrc);
811 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
812
813 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
814 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
815 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
816 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
817
818 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
819 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
820
821 IEM_MC_ADVANCE_RIP_AND_FINISH();
822 IEM_MC_END();
823 }
824 else
825 {
826 /*
827 * 256-bit: Memory, register.
828 */
829 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
830 IEM_MC_LOCAL(RTUINT256U, uSrc);
831 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
832
833 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
834 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
835 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
836 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
837
838 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
839 IEM_MC_STORE_MEM_U256_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
840
841 IEM_MC_ADVANCE_RIP_AND_FINISH();
842 IEM_MC_END();
843 }
844}
845
846
847FNIEMOP_DEF(iemOp_vmovss_Wss_Hss_Vss)
848{
849 Assert(pVCpu->iem.s.uVexLength <= 1);
850 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
851 if (IEM_IS_MODRM_REG_MODE(bRm))
852 {
853 /**
854 * @opcode 0x11
855 * @oppfx 0xf3
856 * @opcodesub 11 mr/reg
857 * @opcpuid avx
858 * @opgroup og_avx_simdfp_datamerge
859 * @opxcpttype 5
860 * @optest op1=1 op2=0 op3=2 -> op1=2
861 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffea
862 * @optest op1=3 op2=-1 op3=0x77 -> op1=-4294967177
863 * @optest op1=3 op2=0x42 op3=0x77 -> op1=0x4200000077
864 */
865 IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSS, vmovss, Uss_WO, HssHi, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
866 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
867 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
868
869 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
870 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
871 IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm) /*U32*/,
872 IEM_GET_MODRM_REG(pVCpu, bRm),
873 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
874 IEM_MC_ADVANCE_RIP_AND_FINISH();
875 IEM_MC_END();
876 }
877 else
878 {
879 /**
880 * @opdone
881 * @opcode 0x11
882 * @oppfx 0xf3
883 * @opcodesub !11 mr/reg
884 * @opcpuid avx
885 * @opgroup og_avx_simdfp_datamove
886 * @opxcpttype 5
887 * @opfunction iemOp_vmovss_Vss_Hss_Wss
888 * @optest op1=1 op2=2 -> op1=2
889 * @optest op1=0 op2=-22 -> op1=-22
890 */
891 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSS, vmovss, Md_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
892 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
893 IEM_MC_LOCAL(uint32_t, uSrc);
894 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
895
896 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
897 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
898 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
899 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
900
901 IEM_MC_FETCH_YREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
902 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
903
904 IEM_MC_ADVANCE_RIP_AND_FINISH();
905 IEM_MC_END();
906 }
907}
908
909
910FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hsd_Vsd)
911{
912 Assert(pVCpu->iem.s.uVexLength <= 1);
913 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
914 if (IEM_IS_MODRM_REG_MODE(bRm))
915 {
916 /**
917 * @opcode 0x11
918 * @oppfx 0xf2
919 * @opcodesub 11 mr/reg
920 * @opcpuid avx
921 * @opgroup og_avx_simdfp_datamerge
922 * @opxcpttype 5
923 * @optest op1=1 op2=0 op3=2 -> op1=2
924 * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
925 * @optest op1=3 op2=-1 op3=0x77 ->
926 * op1=0xffffffffffffffff0000000000000077
927 * @optest op2=0x42 op3=0x77 -> op1=0x420000000000000077
928 */
929 IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSD, vmovsd, Usd_WO, HsdHi, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
930 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
931 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
932
933 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
934 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
935 IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
936 IEM_GET_MODRM_REG(pVCpu, bRm),
937 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
938 IEM_MC_ADVANCE_RIP_AND_FINISH();
939 IEM_MC_END();
940 }
941 else
942 {
943 /**
944 * @opdone
945 * @opcode 0x11
946 * @oppfx 0xf2
947 * @opcodesub !11 mr/reg
948 * @opcpuid avx
949 * @opgroup og_avx_simdfp_datamove
950 * @opxcpttype 5
951 * @opfunction iemOp_vmovsd_Wsd_Hsd_Vsd
952 * @optest op1=1 op2=2 -> op1=2
953 * @optest op1=0 op2=-22 -> op1=-22
954 */
955 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSD, vmovsd, Mq_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
956 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
957 IEM_MC_LOCAL(uint64_t, uSrc);
958 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
959
960 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
961 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
962 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
963 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
964
965 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
966 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
967
968 IEM_MC_ADVANCE_RIP_AND_FINISH();
969 IEM_MC_END();
970 }
971}
972
973
974FNIEMOP_DEF(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps)
975{
976 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
977 if (IEM_IS_MODRM_REG_MODE(bRm))
978 {
979 /**
980 * @opcode 0x12
981 * @opcodesub 11 mr/reg
982 * @oppfx none
983 * @opcpuid avx
984 * @opgroup og_avx_simdfp_datamerge
985 * @opxcpttype 7LZ
986 * @optest op2=0x2200220122022203
987 * op3=0x3304330533063307
988 * -> op1=0x22002201220222033304330533063307
989 * @optest op2=-1 op3=-42 -> op1=-42
990 * @note op3 and op2 are only the 8-byte high XMM register halves.
991 */
992 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVHLPS, vmovhlps, Vq_WO, HqHi, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
993 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
994 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
995
996 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
997 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
998 IEM_MC_MERGE_YREG_U64HI_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
999 IEM_GET_MODRM_RM(pVCpu, bRm),
1000 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1001
1002 IEM_MC_ADVANCE_RIP_AND_FINISH();
1003 IEM_MC_END();
1004 }
1005 else
1006 {
1007 /**
1008 * @opdone
1009 * @opcode 0x12
1010 * @opcodesub !11 mr/reg
1011 * @oppfx none
1012 * @opcpuid avx
1013 * @opgroup og_avx_simdfp_datamove
1014 * @opxcpttype 5LZ
1015 * @opfunction iemOp_vmovlps_Vq_Hq_Mq__vmovhlps
1016 * @optest op1=1 op2=0 op3=0 -> op1=0
1017 * @optest op1=0 op2=-1 op3=-1 -> op1=-1
1018 * @optest op1=1 op2=2 op3=3 -> op1=0x20000000000000003
1019 * @optest op2=-1 op3=0x42 -> op1=0xffffffffffffffff0000000000000042
1020 */
1021 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPS, vmovlps, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1022
1023 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1024 IEM_MC_LOCAL(uint64_t, uSrc);
1025 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1026
1027 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1028 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1029 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1030 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1031
1032 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1033 IEM_MC_MERGE_YREG_U64LOCAL_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1034 uSrc,
1035 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1036
1037 IEM_MC_ADVANCE_RIP_AND_FINISH();
1038 IEM_MC_END();
1039 }
1040}
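/*
 * Illustrative sketch (kept out of the build): vmovhlps assembles the low
 * qword of the result from the high qword of the r/m register and the high
 * qword from the VVVV register, then zero-extends, mirroring the
 * IEM_MC_MERGE_YREG_U64HI_U64HI_ZX_VLMAX call above.  Hypothetical names.
 */
#if 0
static void mySketchMovHlPs(uint64_t au64Dst[4], uint64_t const au64Uq[2], uint64_t const au64Hq[2])
{
    au64Dst[0] = au64Uq[1];     /* low qword  <- high qword of r/m  */
    au64Dst[1] = au64Hq[1];     /* high qword <- high qword of vvvv */
    au64Dst[2] = au64Dst[3] = 0;
}
#endif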
1041
1042
1043/**
1044 * @opcode 0x12
1045 * @opcodesub !11 mr/reg
1046 * @oppfx 0x66
1047 * @opcpuid avx
1048 * @opgroup og_avx_pcksclr_datamerge
1049 * @opxcpttype 5LZ
1050 * @optest op2=0 op3=2 -> op1=2
1051 * @optest op2=0x22 op3=0x33 -> op1=0x220000000000000033
1052 * @optest op2=0xfffffff0fffffff1 op3=0xeeeeeee8eeeeeee9
1053 * -> op1=0xfffffff0fffffff1eeeeeee8eeeeeee9
1054 */
1055FNIEMOP_DEF(iemOp_vmovlpd_Vq_Hq_Mq)
1056{
1057 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1058 if (IEM_IS_MODRM_MEM_MODE(bRm))
1059 {
1060 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPD, vmovlpd, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1061
1062 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1063 IEM_MC_LOCAL(uint64_t, uSrc);
1064 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1065
1066 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1067 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1068 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1069 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1070
1071 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1072 IEM_MC_MERGE_YREG_U64LOCAL_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1073 uSrc,
1074 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1075
1076 IEM_MC_ADVANCE_RIP_AND_FINISH();
1077 IEM_MC_END();
1078 }
1079
1080 /**
1081 * @opdone
1082 * @opmnemonic udvex660f12m3
1083 * @opcode 0x12
1084 * @opcodesub 11 mr/reg
1085 * @oppfx 0x66
1086 * @opunused immediate
1087 * @opcpuid avx
1088 * @optest ->
1089 */
1090 else
1091 IEMOP_RAISE_INVALID_OPCODE_RET();
1092}
1093
1094
1095/**
1096 * @opcode 0x12
1097 * @oppfx 0xf3
1098 * @opcpuid avx
1099 * @opgroup og_avx_pcksclr_datamove
1100 * @opxcpttype 4
1101 * @optest vex.l==0 / op1=-1 op2=0xdddddddd00000002eeeeeeee00000001
1102 * -> op1=0x00000002000000020000000100000001
1103 * @optest vex.l==1 /
1104 * op2=0xbbbbbbbb00000004cccccccc00000003dddddddd00000002eeeeeeee00000001
1105 * -> op1=0x0000000400000004000000030000000300000002000000020000000100000001
1106 */
1107FNIEMOP_DEF(iemOp_vmovsldup_Vx_Wx)
1108{
1109 IEMOP_MNEMONIC2(VEX_RM, VMOVSLDUP, vmovsldup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1110 Assert(pVCpu->iem.s.uVexLength <= 1);
1111 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1112 if (IEM_IS_MODRM_REG_MODE(bRm))
1113 {
1114 /*
1115 * Register, register.
1116 */
1117 if (pVCpu->iem.s.uVexLength == 0)
1118 {
1119 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1120 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1121 IEM_MC_LOCAL(RTUINT128U, uSrc);
1122
1123 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1124 IEM_MC_PREPARE_AVX_USAGE();
1125
1126 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1127 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
1128 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
1129 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
1130 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
1131 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1132
1133 IEM_MC_ADVANCE_RIP_AND_FINISH();
1134 IEM_MC_END();
1135 }
1136 else
1137 {
1138 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1139 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1140 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1141 IEM_MC_PREPARE_AVX_USAGE();
1142
1143 IEM_MC_LOCAL(RTUINT256U, uSrc);
1144 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1145 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
1146 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
1147 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
1148 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
1149 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 4, uSrc, 4);
1150 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 5, uSrc, 4);
1151 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 6, uSrc, 6);
1152 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 7, uSrc, 6);
1153 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1154
1155 IEM_MC_ADVANCE_RIP_AND_FINISH();
1156 IEM_MC_END();
1157 }
1158 }
1159 else
1160 {
1161 /*
1162 * Register, memory.
1163 */
1164 if (pVCpu->iem.s.uVexLength == 0)
1165 {
1166 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1167 IEM_MC_LOCAL(RTUINT128U, uSrc);
1168 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1169
1170 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1171 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1172 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1173 IEM_MC_PREPARE_AVX_USAGE();
1174
1175 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1176 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
1177 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
1178 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
1179 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
1180 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1181
1182 IEM_MC_ADVANCE_RIP_AND_FINISH();
1183 IEM_MC_END();
1184 }
1185 else
1186 {
1187 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1188 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1189 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1190 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1191 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1192 IEM_MC_PREPARE_AVX_USAGE();
1193
1194 IEM_MC_LOCAL(RTUINT256U, uSrc);
1195 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1196
1197 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
1198 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
1199 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
1200 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
1201 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 4, uSrc, 4);
1202 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 5, uSrc, 4);
1203 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 6, uSrc, 6);
1204 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 7, uSrc, 6);
1205 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1206
1207 IEM_MC_ADVANCE_RIP_AND_FINISH();
1208 IEM_MC_END();
1209 }
1210 }
1211}
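/*
 * Illustrative sketch (kept out of the build): vmovsldup duplicates each
 * even-indexed dword into the odd slot above it, which is exactly what the
 * per-element stores above spell out (cDwords is 4 for VEX.128, 8 for
 * VEX.256).  Hypothetical name; buffers assumed not to overlap.
 */
#if 0
static void mySketchMovSlDup(uint32_t *pauDst, uint32_t const *pauSrc, unsigned cDwords)
{
    for (unsigned i = 0; i < cDwords; i += 2)
        pauDst[i] = pauDst[i + 1] = pauSrc[i];
}
#endif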
1212
1213
1214/**
1215 * @opcode 0x12
1216 * @oppfx 0xf2
1217 * @opcpuid avx
1218 * @opgroup og_avx_pcksclr_datamove
1219 * @opxcpttype 5
1220 * @optest vex.l==0 / op2=0xddddddddeeeeeeee2222222211111111
1221 * -> op1=0x22222222111111112222222211111111
1222 * @optest vex.l==1 / op2=0xbbbbbbbbcccccccc4444444433333333ddddddddeeeeeeee2222222211111111
1223 * -> op1=0x4444444433333333444444443333333322222222111111112222222211111111
1224 */
1225FNIEMOP_DEF(iemOp_vmovddup_Vx_Wx)
1226{
1227 IEMOP_MNEMONIC2(VEX_RM, VMOVDDUP, vmovddup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1228 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1229 if (IEM_IS_MODRM_REG_MODE(bRm))
1230 {
1231 /*
1232 * Register, register.
1233 */
1234 if (pVCpu->iem.s.uVexLength == 0)
1235 {
1236 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1237 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1238 IEM_MC_LOCAL(uint64_t, uSrc);
1239
1240 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1241 IEM_MC_PREPARE_AVX_USAGE();
1242
1243 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
1244 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
1245 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
1246 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1247
1248 IEM_MC_ADVANCE_RIP_AND_FINISH();
1249 IEM_MC_END();
1250 }
1251 else
1252 {
1253 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1254 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1255 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1256 IEM_MC_PREPARE_AVX_USAGE();
1257
1258 IEM_MC_LOCAL(uint64_t, uSrc1);
1259 IEM_MC_LOCAL(uint64_t, uSrc2);
1260 IEM_MC_FETCH_YREG_U64(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
1261 IEM_MC_FETCH_YREG_U64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 2 /* a_iQword*/);
1262
1263 IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc1);
1264 IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc1);
1265 IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 2 /* a_iQword*/, uSrc2);
1266 IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 3 /* a_iQword*/, uSrc2);
1267 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1268
1269 IEM_MC_ADVANCE_RIP_AND_FINISH();
1270 IEM_MC_END();
1271 }
1272 }
1273 else
1274 {
1275 /*
1276 * Register, memory.
1277 */
1278 if (pVCpu->iem.s.uVexLength == 0)
1279 {
1280 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1281 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1282 IEM_MC_LOCAL(uint64_t, uSrc);
1283
1284 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1285 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1286 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1287 IEM_MC_PREPARE_AVX_USAGE();
1288
1289 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1290 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
1291 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
1292 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1293
1294 IEM_MC_ADVANCE_RIP_AND_FINISH();
1295 IEM_MC_END();
1296 }
1297 else
1298 {
1299 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1300 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1301
1302 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1303 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1304 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1305 IEM_MC_PREPARE_AVX_USAGE();
1306
1307 IEM_MC_LOCAL(RTUINT256U, uSrc);
1308 IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1309
1310 IEM_MC_STORE_YREG_U64_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQwDst*/, uSrc, 0 /*a_iQwSrc*/);
1311 IEM_MC_STORE_YREG_U64_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQwDst*/, uSrc, 0 /*a_iQwSrc*/);
1312 IEM_MC_STORE_YREG_U64_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2 /*a_iQwDst*/, uSrc, 2 /*a_iQwSrc*/);
1313 IEM_MC_STORE_YREG_U64_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3 /*a_iQwDst*/, uSrc, 2 /*a_iQwSrc*/);
1314 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1315
1316 IEM_MC_ADVANCE_RIP_AND_FINISH();
1317 IEM_MC_END();
1318 }
1319 }
1320}
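/*
 * Illustrative sketch (kept out of the build): vmovddup duplicates qword 0
 * within each 128-bit lane (so qwords 0 and 2 for VEX.256), matching the
 * store pattern above.  Hypothetical name; buffers assumed not to overlap.
 */
#if 0
static void mySketchMovDDup(uint64_t *pauDst, uint64_t const *pauSrc, unsigned cQwords)
{
    for (unsigned i = 0; i < cQwords; i += 2)
        pauDst[i] = pauDst[i + 1] = pauSrc[i];
}
#endif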
1321
1322
1323/**
1324 * @opcode 0x13
1325 * @opcodesub !11 mr/reg
1326 * @oppfx none
1327 * @opcpuid avx
1328 * @opgroup og_avx_simdfp_datamove
1329 * @opxcpttype 5
1330 * @optest op1=1 op2=2 -> op1=2
1331 * @optest op1=0 op2=-42 -> op1=-42
1332 */
1333FNIEMOP_DEF(iemOp_vmovlps_Mq_Vq)
1334{
1335 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1336 if (IEM_IS_MODRM_MEM_MODE(bRm))
1337 {
1338 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPS, vmovlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1339
1340 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1341 IEM_MC_LOCAL(uint64_t, uSrc);
1342 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1343
1344 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1345 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
1346 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1347 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1348
1349 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
1350 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1351
1352 IEM_MC_ADVANCE_RIP_AND_FINISH();
1353 IEM_MC_END();
1354 }
1355
1356 /**
1357 * @opdone
1358 * @opmnemonic udvex0f13m3
1359 * @opcode 0x13
1360 * @opcodesub 11 mr/reg
1361 * @oppfx none
1362 * @opunused immediate
1363 * @opcpuid avx
1364 * @optest ->
1365 */
1366 else
1367 IEMOP_RAISE_INVALID_OPCODE_RET();
1368}
1369
1370
1371/**
1372 * @opcode 0x13
1373 * @opcodesub !11 mr/reg
1374 * @oppfx 0x66
1375 * @opcpuid avx
1376 * @opgroup og_avx_pcksclr_datamove
1377 * @opxcpttype 5
1378 * @optest op1=1 op2=2 -> op1=2
1379 * @optest op1=0 op2=-42 -> op1=-42
1380 */
1381FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
1382{
1383 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1384 if (IEM_IS_MODRM_MEM_MODE(bRm))
1385 {
1386 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPD, vmovlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1387 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1388 IEM_MC_LOCAL(uint64_t, uSrc);
1389 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1390
1391 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1392 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
1393 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1394 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1395
1396 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
1397 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1398
1399 IEM_MC_ADVANCE_RIP_AND_FINISH();
1400 IEM_MC_END();
1401 }
1402
1403 /**
1404 * @opdone
1405 * @opmnemonic udvex660f13m3
1406 * @opcode 0x13
1407 * @opcodesub 11 mr/reg
1408 * @oppfx 0x66
1409 * @opunused immediate
1410 * @opcpuid avx
1411 * @optest ->
1412 */
1413 else
1414 IEMOP_RAISE_INVALID_OPCODE_RET();
1415}
1416
1417/* Opcode VEX.F3.0F 0x13 - invalid */
1418/* Opcode VEX.F2.0F 0x13 - invalid */
1419
1420/** Opcode VEX.0F 0x14 - vunpcklps Vx, Hx, Wx*/
1421FNIEMOP_DEF(iemOp_vunpcklps_Vx_Hx_Wx)
1422{
1423 IEMOP_MNEMONIC3(VEX_RVM, VUNPCKLPS, vunpcklps, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
1424 IEMOPMEDIAOPTF3_INIT_VARS( vunpcklps);
1425 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1426}
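/*
 * Illustrative sketch (kept out of the build): the conceptual effect of the
 * IEMOPMEDIAOPTF3_INIT_VARS + IEM_SELECT_HOST_OR_FALLBACK pairing used here
 * and below -- pick the host-accelerated implementation table when the host
 * CPU has the feature, else the portable C fallback.  The real macros expand
 * to static tables and feature checks rather than this runtime helper.
 */
#if 0
static PCIEMOPMEDIAOPTF3 mySketchSelectImpl(bool fHostHasAvx2,
                                            PCIEMOPMEDIAOPTF3 pHost,
                                            PCIEMOPMEDIAOPTF3 pFallback)
{
    return fHostHasAvx2 ? pHost : pFallback;
}
#endif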
1427
1428
1429/** Opcode VEX.66.0F 0x14 - vunpcklpd Vx,Hx,Wx */
1430FNIEMOP_DEF(iemOp_vunpcklpd_Vx_Hx_Wx)
1431{
1432 IEMOP_MNEMONIC3(VEX_RVM, VUNPCKLPD, vunpcklpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
1433 IEMOPMEDIAOPTF3_INIT_VARS( vunpcklpd);
1434 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1435}
1436
1437
1438/* Opcode VEX.F3.0F 0x14 - invalid */
1439/* Opcode VEX.F2.0F 0x14 - invalid */
1440
1441
1442/** Opcode VEX.0F 0x15 - vunpckhps Vx, Hx, Wx */
1443FNIEMOP_DEF(iemOp_vunpckhps_Vx_Hx_Wx)
1444{
1445 IEMOP_MNEMONIC3(VEX_RVM, VUNPCKHPS, vunpckhps, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
1446 IEMOPMEDIAOPTF3_INIT_VARS( vunpckhps);
1447 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1448}
1449
1450
1451/** Opcode VEX.66.0F 0x15 - vunpckhpd Vx,Hx,Wx */
1452FNIEMOP_DEF(iemOp_vunpckhpd_Vx_Hx_Wx)
1453{
1454 IEMOP_MNEMONIC3(VEX_RVM, VUNPCKHPD, vunpckhpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
1455 IEMOPMEDIAOPTF3_INIT_VARS( vunpckhpd);
1456 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
1457}
1458
1459
1460/* Opcode VEX.F3.0F 0x15 - invalid */
1461/* Opcode VEX.F2.0F 0x15 - invalid */
1462
1463
1464FNIEMOP_DEF(iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq)
1465{
1466 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1467 if (IEM_IS_MODRM_REG_MODE(bRm))
1468 {
1469 /**
1470 * @opcode 0x16
1471 * @opcodesub 11 mr/reg
1472 * @oppfx none
1473 * @opcpuid avx
1474 * @opgroup og_avx_simdfp_datamerge
1475 * @opxcpttype 7LZ
1476 */
1477 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVLHPS, vmovlhps, Vq_WO, Hq, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1478
1479 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1480 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1481
1482 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1483 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1484 IEM_MC_MERGE_YREG_U64LO_U64LO_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1485 IEM_GET_MODRM_RM(pVCpu, bRm),
1486 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1487
1488 IEM_MC_ADVANCE_RIP_AND_FINISH();
1489 IEM_MC_END();
1490 }
1491 else
1492 {
1493 /**
1494 * @opdone
1495 * @opcode 0x16
1496 * @opcodesub !11 mr/reg
1497 * @oppfx none
1498 * @opcpuid avx
1499 * @opgroup og_avx_simdfp_datamove
1500 * @opxcpttype 5LZ
1501 * @opfunction iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq
1502 */
1503 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVHPS, vmovhps, Vq_WO, Hq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1504
1505 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1506 IEM_MC_LOCAL(uint64_t, uSrc);
1507 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1508
1509 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1510 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1511 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1512 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1513
1514 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1515 IEM_MC_MERGE_YREG_U64LO_U64LOCAL_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1516 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/,
1517 uSrc);
1518
1519 IEM_MC_ADVANCE_RIP_AND_FINISH();
1520 IEM_MC_END();
1521 }
1522}
1523
1524
1525/**
1526 * @opcode 0x16
1527 * @opcodesub !11 mr/reg
1528 * @oppfx 0x66
1529 * @opcpuid avx
1530 * @opgroup og_avx_pcksclr_datamerge
1531 * @opxcpttype 5LZ
1532 */
1533FNIEMOP_DEF(iemOp_vmovhpd_Vdq_Hq_Mq)
1534{
1535 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1536 if (IEM_IS_MODRM_MEM_MODE(bRm))
1537 {
1538 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVHPD, vmovhpd, Vq_WO, Hq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1539
1540 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1541 IEM_MC_LOCAL(uint64_t, uSrc);
1542 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1543
1544 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1545 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1546 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1547 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1548
1549 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1550 IEM_MC_MERGE_YREG_U64LO_U64LOCAL_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1551 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/,
1552 uSrc);
1553
1554 IEM_MC_ADVANCE_RIP_AND_FINISH();
1555 IEM_MC_END();
1556 }
1557
1558 /**
1559 * @opdone
1560 * @opmnemonic udvex660f16m3
1561 * @opcode 0x16
1562 * @opcodesub 11 mr/reg
1563 * @oppfx 0x66
1564 * @opunused immediate
1565 * @opcpuid avx
1566 * @optest ->
1567 */
1568 else
1569 IEMOP_RAISE_INVALID_OPCODE_RET();
1570}
1571
1572
1573/** Opcode VEX.F3.0F 0x16 - vmovshdup Vx, Wx */
1574/**
1575 * @opcode 0x16
1576 * @oppfx 0xf3
1577 * @opcpuid avx
1578 * @opgroup og_avx_pcksclr_datamove
1579 * @opxcpttype 4
1580 */
1581FNIEMOP_DEF(iemOp_vmovshdup_Vx_Wx)
1582{
1583 IEMOP_MNEMONIC2(VEX_RM, VMOVSHDUP, vmovshdup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1584 Assert(pVCpu->iem.s.uVexLength <= 1);
1585 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1586 if (IEM_IS_MODRM_REG_MODE(bRm))
1587 {
1588 /*
1589 * Register, register.
1590 */
1591 if (pVCpu->iem.s.uVexLength == 0)
1592 {
1593 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1594 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1595 IEM_MC_LOCAL(RTUINT128U, uSrc);
1596
1597 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1598 IEM_MC_PREPARE_AVX_USAGE();
1599
1600 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1601 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
1602 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
1603 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
1604 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
1605 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1606
1607 IEM_MC_ADVANCE_RIP_AND_FINISH();
1608 IEM_MC_END();
1609 }
1610 else
1611 {
1612 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1613 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1614 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1615 IEM_MC_PREPARE_AVX_USAGE();
1616
1617 IEM_MC_LOCAL(RTUINT256U, uSrc);
1618 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1619 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
1620 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
1621 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
1622 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
1623 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 4, uSrc, 5);
1624 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 5, uSrc, 5);
1625 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 6, uSrc, 7);
1626 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 7, uSrc, 7);
1627 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1628
1629 IEM_MC_ADVANCE_RIP_AND_FINISH();
1630 IEM_MC_END();
1631 }
1632 }
1633 else
1634 {
1635 /*
1636 * Register, memory.
1637 */
1638 if (pVCpu->iem.s.uVexLength == 0)
1639 {
1640 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1641 IEM_MC_LOCAL(RTUINT128U, uSrc);
1642 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1643
1644 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1645 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1646 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1647 IEM_MC_PREPARE_AVX_USAGE();
1648
1649 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1650 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
1651 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
1652 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
1653 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
1654 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1655
1656 IEM_MC_ADVANCE_RIP_AND_FINISH();
1657 IEM_MC_END();
1658 }
1659 else
1660 {
1661 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1662 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1663 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1664 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1665 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1666 IEM_MC_PREPARE_AVX_USAGE();
1667
1668 IEM_MC_LOCAL(RTUINT256U, uSrc);
1669 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1670
1671 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
1672 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
1673 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
1674 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
1675 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 4, uSrc, 5);
1676 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 5, uSrc, 5);
1677 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 6, uSrc, 7);
1678 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 7, uSrc, 7);
1679 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1680
1681 IEM_MC_ADVANCE_RIP_AND_FINISH();
1682 IEM_MC_END();
1683 }
1684 }
1685}
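/*
 * Illustrative sketch (kept out of the build): vmovshdup is the odd-element
 * counterpart of vmovsldup -- each odd-indexed dword is copied into the even
 * slot below it as well, matching the store pattern above.  Hypothetical
 * name; buffers assumed not to overlap.
 */
#if 0
static void mySketchMovShDup(uint32_t *pauDst, uint32_t const *pauSrc, unsigned cDwords)
{
    for (unsigned i = 0; i < cDwords; i += 2)
        pauDst[i] = pauDst[i + 1] = pauSrc[i + 1];
}
#endif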
1686
1687
1688/* Opcode VEX.F2.0F 0x16 - invalid */
1689
1690
1691/**
1692 * @opcode 0x17
1693 * @opcodesub !11 mr/reg
1694 * @oppfx none
1695 * @opcpuid avx
1696 * @opgroup og_avx_simdfp_datamove
1697 * @opxcpttype 5
1698 */
1699FNIEMOP_DEF(iemOp_vmovhps_Mq_Vq)
1700{
1701 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1702 if (IEM_IS_MODRM_MEM_MODE(bRm))
1703 {
1704 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVHPS, vmovhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1705
1706 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1707 IEM_MC_LOCAL(uint64_t, uSrc);
1708 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1709
1710 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1711 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
1712 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1713 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1714
1715 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQWord*/);
1716 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1717
1718 IEM_MC_ADVANCE_RIP_AND_FINISH();
1719 IEM_MC_END();
1720 }
1721
1722 /**
1723 * @opdone
1724 * @opmnemonic udvex0f17m3
1725 * @opcode 0x17
1726 * @opcodesub 11 mr/reg
1727 * @oppfx none
1728 * @opunused immediate
1729 * @opcpuid avx
1730 * @optest ->
1731 */
1732 else
1733 IEMOP_RAISE_INVALID_OPCODE_RET();
1734}
1735
1736
1737/**
1738 * @opcode 0x17
1739 * @opcodesub !11 mr/reg
1740 * @oppfx 0x66
1741 * @opcpuid avx
1742 * @opgroup og_avx_pcksclr_datamove
1743 * @opxcpttype 5
1744 */
1745FNIEMOP_DEF(iemOp_vmovhpd_Mq_Vq)
1746{
1747 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1748 if (IEM_IS_MODRM_MEM_MODE(bRm))
1749 {
1750 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVHPD, vmovhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1751
1752 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1753 IEM_MC_LOCAL(uint64_t, uSrc);
1754 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1755
1756 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1757 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
1758 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1759 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1760
1761 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQWord*/);
1762 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1763
1764 IEM_MC_ADVANCE_RIP_AND_FINISH();
1765 IEM_MC_END();
1766 }
1767
1768 /**
1769 * @opdone
1770 * @opmnemonic udvex660f17m3
1771 * @opcode 0x17
1772 * @opcodesub 11 mr/reg
1773 * @oppfx 0x66
1774 * @opunused immediate
1775 * @opcpuid avx
1776 * @optest ->
1777 */
1778 else
1779 IEMOP_RAISE_INVALID_OPCODE_RET();
1780}
1781
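/*
 * Illustrative sketch only: vmovhps and vmovhpd above are identical in
 * what they move - the high qword (a_iQWord=1) of the XMM register goes
 * to the 64-bit memory operand.  Hypothetical rendering, assuming an
 * RTUINT128U view of the source register:
 */
//static uint64_t iemExampleMovHighQwordToMem(PCRTUINT128U puSrc) /* hypothetical helper */
//{
//    return puSrc->au64[1]; /* the qword written to the Mq operand */
//}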
1782
1783/* Opcode VEX.F3.0F 0x17 - invalid */
1784/* Opcode VEX.F2.0F 0x17 - invalid */
1785
1786
1787/* Opcode VEX.0F 0x18 - invalid */
1788/* Opcode VEX.0F 0x19 - invalid */
1789/* Opcode VEX.0F 0x1a - invalid */
1790/* Opcode VEX.0F 0x1b - invalid */
1791/* Opcode VEX.0F 0x1c - invalid */
1792/* Opcode VEX.0F 0x1d - invalid */
1793/* Opcode VEX.0F 0x1e - invalid */
1794/* Opcode VEX.0F 0x1f - invalid */
1795
1796/* Opcode VEX.0F 0x20 - invalid */
1797/* Opcode VEX.0F 0x21 - invalid */
1798/* Opcode VEX.0F 0x22 - invalid */
1799/* Opcode VEX.0F 0x23 - invalid */
1800/* Opcode VEX.0F 0x24 - invalid */
1801/* Opcode VEX.0F 0x25 - invalid */
1802/* Opcode VEX.0F 0x26 - invalid */
1803/* Opcode VEX.0F 0x27 - invalid */
1804
1805/**
1806 * @opcode 0x28
1807 * @oppfx none
1808 * @opcpuid avx
1809 * @opgroup og_avx_pcksclr_datamove
1810 * @opxcpttype 1
1811 * @optest op1=1 op2=2 -> op1=2
1812 * @optest op1=0 op2=-42 -> op1=-42
1813 * @note Almost identical to vmovapd.
1814 */
1815FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
1816{
1817 IEMOP_MNEMONIC2(VEX_RM, VMOVAPS, vmovaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1818 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1819 Assert(pVCpu->iem.s.uVexLength <= 1);
1820 if (IEM_IS_MODRM_REG_MODE(bRm))
1821 {
1822 /*
1823 * Register, register.
1824 */
1825 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1826 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1827
1828 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1829 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1830 if (pVCpu->iem.s.uVexLength == 0)
1831 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1832 IEM_GET_MODRM_RM(pVCpu, bRm));
1833 else
1834 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1835 IEM_GET_MODRM_RM(pVCpu, bRm));
1836 IEM_MC_ADVANCE_RIP_AND_FINISH();
1837 IEM_MC_END();
1838 }
1839 else
1840 {
1841 /*
1842 * Register, memory.
1843 */
1844 if (pVCpu->iem.s.uVexLength == 0)
1845 {
1846 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1847 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1848 IEM_MC_LOCAL(RTUINT128U, uSrc);
1849
1850 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1851 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1852 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1853 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1854
1855 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1856 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1857
1858 IEM_MC_ADVANCE_RIP_AND_FINISH();
1859 IEM_MC_END();
1860 }
1861 else
1862 {
1863 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1864 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1865 IEM_MC_LOCAL(RTUINT256U, uSrc);
1866
1867 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1868 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1869 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1870 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1871
1872 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1873 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1874
1875 IEM_MC_ADVANCE_RIP_AND_FINISH();
1876 IEM_MC_END();
1877 }
1878 }
1879}
1880
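/*
 * Illustrative sketch only: the IEM_MC_FETCH_MEM_U128_ALIGN_SSE and
 * IEM_MC_FETCH_MEM_U256_ALIGN_AVX micro-ops used above enforce the
 * natural alignment the aligned-move encodings require; conceptually the
 * check is just this (real fault delivery goes through the IEM exception
 * machinery, this helper is hypothetical):
 */
//static bool iemExampleIsAlignedMoveOk(RTGCPTR GCPtrEff, uint32_t cbOperand)
//{
//    return (GCPtrEff & (cbOperand - 1)) == 0; /* 16 or 32 byte aligned, else #GP(0) */
//}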
1881
1882/**
1883 * @opcode 0x28
1884 * @oppfx 66
1885 * @opcpuid avx
1886 * @opgroup og_avx_pcksclr_datamove
1887 * @opxcpttype 1
1888 * @optest op1=1 op2=2 -> op1=2
1889 * @optest op1=0 op2=-42 -> op1=-42
1890 * @note Almost identical to vmovaps.
1891 */
1892FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
1893{
1894 IEMOP_MNEMONIC2(VEX_RM, VMOVAPD, vmovapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1895 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1896 Assert(pVCpu->iem.s.uVexLength <= 1);
1897 if (IEM_IS_MODRM_REG_MODE(bRm))
1898 {
1899 /*
1900 * Register, register.
1901 */
1902 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1903 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1904
1905 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1906 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1907 if (pVCpu->iem.s.uVexLength == 0)
1908 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1909 IEM_GET_MODRM_RM(pVCpu, bRm));
1910 else
1911 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1912 IEM_GET_MODRM_RM(pVCpu, bRm));
1913 IEM_MC_ADVANCE_RIP_AND_FINISH();
1914 IEM_MC_END();
1915 }
1916 else
1917 {
1918 /*
1919 * Register, memory.
1920 */
1921 if (pVCpu->iem.s.uVexLength == 0)
1922 {
1923 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1924 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1925 IEM_MC_LOCAL(RTUINT128U, uSrc);
1926
1927 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1928 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1929 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1930 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1931
1932 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1933 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1934
1935 IEM_MC_ADVANCE_RIP_AND_FINISH();
1936 IEM_MC_END();
1937 }
1938 else
1939 {
1940 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1941 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1942 IEM_MC_LOCAL(RTUINT256U, uSrc);
1943
1944 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1945 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1946 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1947 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1948
1949 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1950 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
1951
1952 IEM_MC_ADVANCE_RIP_AND_FINISH();
1953 IEM_MC_END();
1954 }
1955 }
1956}
1957
1958/**
1959 * @opmnemonic udvexf30f28
1960 * @opcode 0x28
1961 * @oppfx 0xf3
1962 * @opunused vex.modrm
1963 * @opcpuid avx
1964 * @optest ->
1965 * @opdone
1966 */
1967
1968/**
1969 * @opmnemonic udvexf20f28
1970 * @opcode 0x28
1971 * @oppfx 0xf2
1972 * @opunused vex.modrm
1973 * @opcpuid avx
1974 * @optest ->
1975 * @opdone
1976 */
1977
1978/**
1979 * @opcode 0x29
1980 * @oppfx none
1981 * @opcpuid avx
1982 * @opgroup og_avx_pcksclr_datamove
1983 * @opxcpttype 1
1984 * @optest op1=1 op2=2 -> op1=2
1985 * @optest op1=0 op2=-42 -> op1=-42
1986 * @note Almost identical to vmovapd.
1987 */
1988FNIEMOP_DEF(iemOp_vmovaps_Wps_Vps)
1989{
1990 IEMOP_MNEMONIC2(VEX_MR, VMOVAPS, vmovaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1991 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1992 Assert(pVCpu->iem.s.uVexLength <= 1);
1993 if (IEM_IS_MODRM_REG_MODE(bRm))
1994 {
1995 /*
1996 * Register, register.
1997 */
1998 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1999 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2000
2001 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2002 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2003 if (pVCpu->iem.s.uVexLength == 0)
2004 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2005 IEM_GET_MODRM_REG(pVCpu, bRm));
2006 else
2007 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2008 IEM_GET_MODRM_REG(pVCpu, bRm));
2009 IEM_MC_ADVANCE_RIP_AND_FINISH();
2010 IEM_MC_END();
2011 }
2012 else
2013 {
2014 /*
2015 * Register, memory.
2016 */
2017 if (pVCpu->iem.s.uVexLength == 0)
2018 {
2019 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2020 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2021 IEM_MC_LOCAL(RTUINT128U, uSrc);
2022
2023 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2024 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2025 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2026 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2027
2028 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
2029 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2030
2031 IEM_MC_ADVANCE_RIP_AND_FINISH();
2032 IEM_MC_END();
2033 }
2034 else
2035 {
2036 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2037 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2038 IEM_MC_LOCAL(RTUINT256U, uSrc);
2039
2040 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2041 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2042 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2043 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2044
2045 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2046 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2047
2048 IEM_MC_ADVANCE_RIP_AND_FINISH();
2049 IEM_MC_END();
2050 }
2051 }
2052}
2053
2054/**
2055 * @opcode 0x29
2056 * @oppfx 66
2057 * @opcpuid avx
2058 * @opgroup og_avx_pcksclr_datamove
2059 * @opxcpttype 1
2060 * @optest op1=1 op2=2 -> op1=2
2061 * @optest op1=0 op2=-42 -> op1=-42
2062 * @note Almost identical to vmovaps.
2063 */
2064FNIEMOP_DEF(iemOp_vmovapd_Wpd_Vpd)
2065{
2066 IEMOP_MNEMONIC2(VEX_MR, VMOVAPD, vmovapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2067 Assert(pVCpu->iem.s.uVexLength <= 1);
2068 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2069 if (IEM_IS_MODRM_REG_MODE(bRm))
2070 {
2071 /*
2072 * Register, register.
2073 */
2074 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2075 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2076
2077 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2078 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2079 if (pVCpu->iem.s.uVexLength == 0)
2080 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2081 IEM_GET_MODRM_REG(pVCpu, bRm));
2082 else
2083 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2084 IEM_GET_MODRM_REG(pVCpu, bRm));
2085 IEM_MC_ADVANCE_RIP_AND_FINISH();
2086 IEM_MC_END();
2087 }
2088 else
2089 {
2090 /*
2091 * Register, memory.
2092 */
2093 if (pVCpu->iem.s.uVexLength == 0)
2094 {
2095 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2096 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2097 IEM_MC_LOCAL(RTUINT128U, uSrc);
2098
2099 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2100 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2101 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2102 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2103
2104 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
2105 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2106
2107 IEM_MC_ADVANCE_RIP_AND_FINISH();
2108 IEM_MC_END();
2109 }
2110 else
2111 {
2112 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2113 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2114 IEM_MC_LOCAL(RTUINT256U, uSrc);
2115
2116 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2117 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2118 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2119 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2120
2121 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2122 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2123
2124 IEM_MC_ADVANCE_RIP_AND_FINISH();
2125 IEM_MC_END();
2126 }
2127 }
2128}
2129
2130
2131/**
2132 * @opmnemonic udvexf30f29
2133 * @opcode 0x29
2134 * @oppfx 0xf3
2135 * @opunused vex.modrm
2136 * @opcpuid avx
2137 * @optest ->
2138 * @opdone
2139 */
2140
2141/**
2142 * @opmnemonic udvexf20f29
2143 * @opcode 0x29
2144 * @oppfx 0xf2
2145 * @opunused vex.modrm
2146 * @opcpuid avx
2147 * @optest ->
2148 * @opdone
2149 */
2150
2151
2152/** Opcode VEX.0F 0x2a - invalid */
2153/** Opcode VEX.66.0F 0x2a - invalid */
2154/** Opcode VEX.F3.0F 0x2a - vcvtsi2ss Vss, Hss, Ey */
2155FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey);
2156/** Opcode VEX.F2.0F 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
2157FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey);
2158
2159
2160/**
2161 * @opcode 0x2b
2162 * @opcodesub !11 mr/reg
2163 * @oppfx none
2164 * @opcpuid avx
2165 * @opgroup og_avx_cachect
2166 * @opxcpttype 1
2167 * @optest op1=1 op2=2 -> op1=2
2168 * @optest op1=0 op2=-42 -> op1=-42
2169 * @note Identical implementation to vmovntpd
2170 */
2171FNIEMOP_DEF(iemOp_vmovntps_Mps_Vps)
2172{
2173 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPS, vmovntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2174 Assert(pVCpu->iem.s.uVexLength <= 1);
2175 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2176 if (IEM_IS_MODRM_MEM_MODE(bRm))
2177 {
2178 /*
2179         * Memory, register.
2180 */
2181 if (pVCpu->iem.s.uVexLength == 0)
2182 {
2183 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2184 IEM_MC_LOCAL(RTUINT128U, uSrc);
2185 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2186
2187 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2188 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2189 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2190 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2191
2192 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2193 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2194
2195 IEM_MC_ADVANCE_RIP_AND_FINISH();
2196 IEM_MC_END();
2197 }
2198 else
2199 {
2200 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2201 IEM_MC_LOCAL(RTUINT256U, uSrc);
2202 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2203
2204 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2205 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2206 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2207 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2208
2209 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2210 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2211
2212 IEM_MC_ADVANCE_RIP_AND_FINISH();
2213 IEM_MC_END();
2214 }
2215 }
2216 /* The register, register encoding is invalid. */
2217 else
2218 IEMOP_RAISE_INVALID_OPCODE_RET();
2219}
2220
2221/**
2222 * @opcode 0x2b
2223 * @opcodesub !11 mr/reg
2224 * @oppfx 0x66
2225 * @opcpuid avx
2226 * @opgroup og_avx_cachect
2227 * @opxcpttype 1
2228 * @optest op1=1 op2=2 -> op1=2
2229 * @optest op1=0 op2=-42 -> op1=-42
2230 * @note Identical implementation to vmovntps
2231 */
2232FNIEMOP_DEF(iemOp_vmovntpd_Mpd_Vpd)
2233{
2234 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPD, vmovntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2235 Assert(pVCpu->iem.s.uVexLength <= 1);
2236 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2237 if (IEM_IS_MODRM_MEM_MODE(bRm))
2238 {
2239 /*
2240         * Memory, register.
2241 */
2242 if (pVCpu->iem.s.uVexLength == 0)
2243 {
2244 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2245 IEM_MC_LOCAL(RTUINT128U, uSrc);
2246 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2247
2248 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2249 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2250 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2251 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2252
2253 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2254 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2255
2256 IEM_MC_ADVANCE_RIP_AND_FINISH();
2257 IEM_MC_END();
2258 }
2259 else
2260 {
2261 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2262 IEM_MC_LOCAL(RTUINT256U, uSrc);
2263 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2264
2265 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2266 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2267 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2268 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2269
2270 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2271 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2272
2273 IEM_MC_ADVANCE_RIP_AND_FINISH();
2274 IEM_MC_END();
2275 }
2276 }
2277 /* The register, register encoding is invalid. */
2278 else
2279 IEMOP_RAISE_INVALID_OPCODE_RET();
2280}
2281
2282/**
2283 * @opmnemonic udvexf30f2b
2284 * @opcode 0x2b
2285 * @oppfx 0xf3
2286 * @opunused vex.modrm
2287 * @opcpuid avx
2288 * @optest ->
2289 * @opdone
2290 */
2291
2292/**
2293 * @opmnemonic udvexf20f2b
2294 * @opcode 0x2b
2295 * @oppfx 0xf2
2296 * @opunused vex.modrm
2297 * @opcpuid avx
2298 * @optest ->
2299 * @opdone
2300 */
2301
2302
2303/* Opcode VEX.0F 0x2c - invalid */
2304/* Opcode VEX.66.0F 0x2c - invalid */
2305/** Opcode VEX.F3.0F 0x2c - vcvttss2si Gy, Wss */
2306FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
2307/** Opcode VEX.F2.0F 0x2c - vcvttsd2si Gy, Wsd */
2308FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);
2309
2310/* Opcode VEX.0F 0x2d - invalid */
2311/* Opcode VEX.66.0F 0x2d - invalid */
2312/** Opcode VEX.F3.0F 0x2d - vcvtss2si Gy, Wss */
2313FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
2314/** Opcode VEX.F2.0F 0x2d - vcvtsd2si Gy, Wsd */
2315FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);
2316
2317
2318/**
2319 * @opcode 0x2e
2320 * @oppfx none
2321 * @opflmodify cf,pf,af,zf,sf,of
2322 * @opflclear af,sf,of
2323 */
2324FNIEMOP_DEF(iemOp_vucomiss_Vss_Wss)
2325{
2326 IEMOP_MNEMONIC2(VEX_RM, VUCOMISS, vucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2327 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2328 if (IEM_IS_MODRM_REG_MODE(bRm))
2329 {
2330 /*
2331 * Register, register.
2332 */
2333 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2334 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2335 IEM_MC_LOCAL(uint32_t, fEFlags);
2336 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2337 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
2338 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
2339 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2340 IEM_MC_PREPARE_AVX_USAGE();
2341 IEM_MC_FETCH_EFLAGS(fEFlags);
2342 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
2343 IEM_MC_FETCH_XREG_R32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDWord*/);
2344 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomiss_u128, iemAImpl_vucomiss_u128_fallback),
2345 pEFlags, uSrc1, uSrc2);
2346 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2347 IEM_MC_COMMIT_EFLAGS(fEFlags);
2348
2349 IEM_MC_ADVANCE_RIP_AND_FINISH();
2350 IEM_MC_END();
2351 }
2352 else
2353 {
2354 /*
2355 * Register, memory.
2356 */
2357 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2358 IEM_MC_LOCAL(uint32_t, fEFlags);
2359 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2360 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
2361 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
2362 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2363
2364 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2365 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2366 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2367 IEM_MC_FETCH_MEM_R32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2368
2369 IEM_MC_PREPARE_AVX_USAGE();
2370 IEM_MC_FETCH_EFLAGS(fEFlags);
2371 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
2372 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomiss_u128, iemAImpl_vucomiss_u128_fallback),
2373 pEFlags, uSrc1, uSrc2);
2374 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2375 IEM_MC_COMMIT_EFLAGS(fEFlags);
2376
2377 IEM_MC_ADVANCE_RIP_AND_FINISH();
2378 IEM_MC_END();
2379 }
2380}
2381
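/*
 * Illustrative sketch only: per the @opflmodify/@opflclear annotations
 * above, vucomiss folds the compare outcome into ZF/PF/CF and clears
 * AF/SF/OF.  A hypothetical rendering of that mapping (the real work
 * happens inside the iemAImpl_vucomiss_u128 worker):
 */
//static uint32_t iemExampleUComToEfl(uint32_t fEFlags, int iCmp, bool fUnordered) /* hypothetical */
//{
//    fEFlags &= ~(uint32_t)(X86_EFL_ZF | X86_EFL_PF | X86_EFL_CF | X86_EFL_AF | X86_EFL_SF | X86_EFL_OF);
//    if (fUnordered)
//        fEFlags |= X86_EFL_ZF | X86_EFL_PF | X86_EFL_CF; /* unordered: ZF,PF,CF = 1,1,1 */
//    else if (iCmp < 0)
//        fEFlags |= X86_EFL_CF;                           /* less:      ZF,PF,CF = 0,0,1 */
//    else if (iCmp == 0)
//        fEFlags |= X86_EFL_ZF;                           /* equal:     ZF,PF,CF = 1,0,0 */
//    return fEFlags;                                      /* greater:   ZF,PF,CF = 0,0,0 */
//}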
2382
2383/**
2384 * @opcode 0x2e
2385 * @oppfx 0x66
2386 * @opflmodify cf,pf,af,zf,sf,of
2387 * @opflclear af,sf,of
2388 */
2389FNIEMOP_DEF(iemOp_vucomisd_Vsd_Wsd)
2390{
2391 IEMOP_MNEMONIC2(VEX_RM, VUCOMISD, vucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2392 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2393 if (IEM_IS_MODRM_REG_MODE(bRm))
2394 {
2395 /*
2396 * Register, register.
2397 */
2398 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2399 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2400 IEM_MC_LOCAL(uint32_t, fEFlags);
2401 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2402 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
2403 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
2404 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2405 IEM_MC_PREPARE_AVX_USAGE();
2406 IEM_MC_FETCH_EFLAGS(fEFlags);
2407 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
2408 IEM_MC_FETCH_XREG_R64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iQWord*/);
2409 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomisd_u128, iemAImpl_vucomisd_u128_fallback),
2410 pEFlags, uSrc1, uSrc2);
2411 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2412 IEM_MC_COMMIT_EFLAGS(fEFlags);
2413
2414 IEM_MC_ADVANCE_RIP_AND_FINISH();
2415 IEM_MC_END();
2416 }
2417 else
2418 {
2419 /*
2420 * Register, memory.
2421 */
2422 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2423 IEM_MC_LOCAL(uint32_t, fEFlags);
2424 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2425 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
2426 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
2427 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2428
2429 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2430 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2431 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2432 IEM_MC_FETCH_MEM_R64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2433
2434 IEM_MC_PREPARE_AVX_USAGE();
2435 IEM_MC_FETCH_EFLAGS(fEFlags);
2436 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
2437 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomisd_u128, iemAImpl_vucomisd_u128_fallback),
2438 pEFlags, uSrc1, uSrc2);
2439 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2440 IEM_MC_COMMIT_EFLAGS(fEFlags);
2441
2442 IEM_MC_ADVANCE_RIP_AND_FINISH();
2443 IEM_MC_END();
2444 }
2445}
2446
2447
2448/* Opcode VEX.F3.0F 0x2e - invalid */
2449/* Opcode VEX.F2.0F 0x2e - invalid */
2450
2451/**
2452 * @opcode 0x2f
2453 * @oppfx none
2454 * @opflmodify cf,pf,af,zf,sf,of
2455 * @opflclear af,sf,of
2456 */
2457FNIEMOP_DEF(iemOp_vcomiss_Vss_Wss)
2458{
2459 IEMOP_MNEMONIC2(VEX_RM, VCOMISS, vcomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2460 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2461 if (IEM_IS_MODRM_REG_MODE(bRm))
2462 {
2463 /*
2464 * Register, register.
2465 */
2466 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2467 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2468 IEM_MC_LOCAL(uint32_t, fEFlags);
2469 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2470 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
2471 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
2472 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2473 IEM_MC_PREPARE_AVX_USAGE();
2474 IEM_MC_FETCH_EFLAGS(fEFlags);
2475 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
2476 IEM_MC_FETCH_XREG_R32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDWord*/);
2477 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomiss_u128, iemAImpl_vcomiss_u128_fallback),
2478 pEFlags, uSrc1, uSrc2);
2479 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2480 IEM_MC_COMMIT_EFLAGS(fEFlags);
2481
2482 IEM_MC_ADVANCE_RIP_AND_FINISH();
2483 IEM_MC_END();
2484 }
2485 else
2486 {
2487 /*
2488 * Register, memory.
2489 */
2490 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2491 IEM_MC_LOCAL(uint32_t, fEFlags);
2492 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2493 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
2494 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
2495 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2496
2497 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2498 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2499 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2500 IEM_MC_FETCH_MEM_R32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2501
2502 IEM_MC_PREPARE_AVX_USAGE();
2503 IEM_MC_FETCH_EFLAGS(fEFlags);
2504 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
2505 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomiss_u128, iemAImpl_vcomiss_u128_fallback),
2506 pEFlags, uSrc1, uSrc2);
2507 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2508 IEM_MC_COMMIT_EFLAGS(fEFlags);
2509
2510 IEM_MC_ADVANCE_RIP_AND_FINISH();
2511 IEM_MC_END();
2512 }
2513}
2514
2515
2516/**
2517 * @opcode 0x2f
2518 * @oppfx 0x66
2519 * @opflmodify cf,pf,af,zf,sf,of
2520 * @opflclear af,sf,of
2521 */
2522FNIEMOP_DEF(iemOp_vcomisd_Vsd_Wsd)
2523{
2524 IEMOP_MNEMONIC2(VEX_RM, VCOMISD, vcomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2525 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2526 if (IEM_IS_MODRM_REG_MODE(bRm))
2527 {
2528 /*
2529 * Register, register.
2530 */
2531 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2532 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2533 IEM_MC_LOCAL(uint32_t, fEFlags);
2534 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2535 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
2536 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
2537 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2538 IEM_MC_PREPARE_AVX_USAGE();
2539 IEM_MC_FETCH_EFLAGS(fEFlags);
2540 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
2541 IEM_MC_FETCH_XREG_R64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iQWord*/);
2542 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomisd_u128, iemAImpl_vcomisd_u128_fallback),
2543 pEFlags, uSrc1, uSrc2);
2544 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2545 IEM_MC_COMMIT_EFLAGS(fEFlags);
2546
2547 IEM_MC_ADVANCE_RIP_AND_FINISH();
2548 IEM_MC_END();
2549 }
2550 else
2551 {
2552 /*
2553 * Register, memory.
2554 */
2555 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2556 IEM_MC_LOCAL(uint32_t, fEFlags);
2557 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2558 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
2559 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
2560 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2561
2562 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2563 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2564 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2565 IEM_MC_FETCH_MEM_R64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2566
2567 IEM_MC_PREPARE_AVX_USAGE();
2568 IEM_MC_FETCH_EFLAGS(fEFlags);
2569 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
2570 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomisd_u128, iemAImpl_vcomisd_u128_fallback),
2571 pEFlags, uSrc1, uSrc2);
2572 IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
2573 IEM_MC_COMMIT_EFLAGS(fEFlags);
2574
2575 IEM_MC_ADVANCE_RIP_AND_FINISH();
2576 IEM_MC_END();
2577 }
2578}
2579
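/*
 * Illustrative note: vcomiss/vcomisd above differ from the vucomis* pair
 * only in NaN handling - the ordered compares raise #IA for QNaN operands
 * too, the unordered ones only for SNaNs.  Hypothetical sketch, assuming
 * the IPRT RTFLOAT64U NaN test macros:
 */
//static bool iemExampleComRaisesInvalid(PCRTFLOAT64U pr64Src1, PCRTFLOAT64U pr64Src2, bool fOrdered) /* hypothetical */
//{
//    if (RTFLOAT64U_IS_SIGNALLING_NAN(pr64Src1) || RTFLOAT64U_IS_SIGNALLING_NAN(pr64Src2))
//        return true;         /* both flavours signal on SNaN */
//    if (RTFLOAT64U_IS_QUIET_NAN(pr64Src1) || RTFLOAT64U_IS_QUIET_NAN(pr64Src2))
//        return fOrdered;     /* only the ordered compare signals on QNaN */
//    return false;
//}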
2580
2581/* Opcode VEX.F3.0F 0x2f - invalid */
2582/* Opcode VEX.F2.0F 0x2f - invalid */
2583
2584/* Opcode VEX.0F 0x30 - invalid */
2585/* Opcode VEX.0F 0x31 - invalid */
2586/* Opcode VEX.0F 0x32 - invalid */
2587/* Opcode VEX.0F 0x33 - invalid */
2588/* Opcode VEX.0F 0x34 - invalid */
2589/* Opcode VEX.0F 0x35 - invalid */
2590/* Opcode VEX.0F 0x36 - invalid */
2591/* Opcode VEX.0F 0x37 - invalid */
2592/* Opcode VEX.0F 0x38 - invalid */
2593/* Opcode VEX.0F 0x39 - invalid */
2594/* Opcode VEX.0F 0x3a - invalid */
2595/* Opcode VEX.0F 0x3b - invalid */
2596/* Opcode VEX.0F 0x3c - invalid */
2597/* Opcode VEX.0F 0x3d - invalid */
2598/* Opcode VEX.0F 0x3e - invalid */
2599/* Opcode VEX.0F 0x3f - invalid */
2600/* Opcode VEX.0F 0x40 - invalid */
2601/* Opcode VEX.0F 0x41 - invalid */
2602/* Opcode VEX.0F 0x42 - invalid */
2603/* Opcode VEX.0F 0x43 - invalid */
2604/* Opcode VEX.0F 0x44 - invalid */
2605/* Opcode VEX.0F 0x45 - invalid */
2606/* Opcode VEX.0F 0x46 - invalid */
2607/* Opcode VEX.0F 0x47 - invalid */
2608/* Opcode VEX.0F 0x48 - invalid */
2609/* Opcode VEX.0F 0x49 - invalid */
2610/* Opcode VEX.0F 0x4a - invalid */
2611/* Opcode VEX.0F 0x4b - invalid */
2612/* Opcode VEX.0F 0x4c - invalid */
2613/* Opcode VEX.0F 0x4d - invalid */
2614/* Opcode VEX.0F 0x4e - invalid */
2615/* Opcode VEX.0F 0x4f - invalid */
2616
2617
2618/** Opcode VEX.0F 0x50 - vmovmskps Gy, Ups */
2619FNIEMOP_DEF(iemOp_vmovmskps_Gy_Ups)
2620{
2621 IEMOP_MNEMONIC2(VEX_RM_REG, VMOVMSKPS, vmovmskps, Gd, Ux, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
2622 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2623 if (IEM_IS_MODRM_REG_MODE(bRm))
2624 {
2625 /*
2626 * Register, register.
2627 */
2628 if (pVCpu->iem.s.uVexLength == 0)
2629 {
2630 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2631 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2632 IEM_MC_LOCAL(uint8_t, u8Dst);
2633 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2634 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2635 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2636 IEM_MC_PREPARE_AVX_USAGE();
2637 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2638 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmovmskps_u128, iemAImpl_vmovmskps_u128_fallback),
2639 pu8Dst, puSrc);
2640 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2641 IEM_MC_ADVANCE_RIP_AND_FINISH();
2642 IEM_MC_END();
2643 }
2644 else
2645 {
2646 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2647 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
2648 IEM_MC_LOCAL(uint8_t, u8Dst);
2649 IEM_MC_LOCAL(RTUINT256U, uSrc);
2650 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2651 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
2652
2653 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2654 IEM_MC_PREPARE_AVX_USAGE();
2655 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2656 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vmovmskps_u256, iemAImpl_vmovmskps_u256_fallback),
2657 pu8Dst, puSrc);
2658 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2659 IEM_MC_ADVANCE_RIP_AND_FINISH();
2660 IEM_MC_END();
2661 }
2662 }
2663 /* No memory operand. */
2664 else
2665 IEMOP_RAISE_INVALID_OPCODE_RET();
2666}
2667
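/*
 * Illustrative sketch only: the vmovmskps helpers invoked above gather
 * the sign bit of each packed single into the low bits of the destination
 * GPR (4 bits for VEX.128, 8 for VEX.256).  Hypothetical plain C version
 * of the 128-bit case:
 */
//static uint8_t iemExampleMovMskPsU128(PCRTUINT128U puSrc) /* hypothetical helper */
//{
//    uint8_t bRet = 0;
//    for (unsigned i = 0; i < 4; i++)
//        bRet |= (uint8_t)((puSrc->au32[i] >> 31) << i); /* copy each sign bit */
//    return bRet;
//}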
2668
2669/** Opcode VEX.66.0F 0x50 - vmovmskpd Gy,Upd */
2670FNIEMOP_DEF(iemOp_vmovmskpd_Gy_Upd)
2671{
2672 IEMOP_MNEMONIC2(VEX_RM_REG, VMOVMSKPD, vmovmskpd, Gd, Ux, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
2673 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2674 if (IEM_IS_MODRM_REG_MODE(bRm))
2675 {
2676 /*
2677 * Register, register.
2678 */
2679 if (pVCpu->iem.s.uVexLength == 0)
2680 {
2681 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2682 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2683 IEM_MC_LOCAL(uint8_t, u8Dst);
2684 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2685 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
2686 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2687 IEM_MC_PREPARE_AVX_USAGE();
2688 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2689 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmovmskpd_u128, iemAImpl_vmovmskpd_u128_fallback),
2690 pu8Dst, puSrc);
2691 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2692 IEM_MC_ADVANCE_RIP_AND_FINISH();
2693 IEM_MC_END();
2694 }
2695 else
2696 {
2697 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2698 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
2699 IEM_MC_LOCAL(uint8_t, u8Dst);
2700 IEM_MC_LOCAL(RTUINT256U, uSrc);
2701 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
2702 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
2703
2704 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2705 IEM_MC_PREPARE_AVX_USAGE();
2706 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
2707 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vmovmskpd_u256, iemAImpl_vmovmskpd_u256_fallback),
2708 pu8Dst, puSrc);
2709 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
2710 IEM_MC_ADVANCE_RIP_AND_FINISH();
2711 IEM_MC_END();
2712 }
2713 }
2714 /* No memory operand. */
2715 else
2716 IEMOP_RAISE_INVALID_OPCODE_RET();
2717}
2718
2719
2720/* Opcode VEX.F3.0F 0x50 - invalid */
2721/* Opcode VEX.F2.0F 0x50 - invalid */
2722
2723/** Opcode VEX.0F 0x51 - vsqrtps Vps, Wps */
2724FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps);
2725/** Opcode VEX.66.0F 0x51 - vsqrtpd Vpd, Wpd */
2726FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd);
2727/** Opcode VEX.F3.0F 0x51 - vsqrtss Vss, Hss, Wss */
2728FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss);
2729/** Opcode VEX.F2.0F 0x51 - vsqrtsd Vsd, Hsd, Wsd */
2730FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd);
2731
2732/** Opcode VEX.0F 0x52 - vrsqrtps Vps, Wps */
2733FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
2734/* Opcode VEX.66.0F 0x52 - invalid */
2735/** Opcode VEX.F3.0F 0x52 - vrsqrtss Vss, Hss, Wss */
2736FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
2737/* Opcode VEX.F2.0F 0x52 - invalid */
2738
2739/** Opcode VEX.0F 0x53 - vrcpps Vps, Wps */
2740FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
2741/* Opcode VEX.66.0F 0x53 - invalid */
2742/** Opcode VEX.F3.0F 0x53 - vrcpss Vss, Hss, Wss */
2743FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
2744/* Opcode VEX.F2.0F 0x53 - invalid */
2745
2746
2747/** Opcode VEX.0F 0x54 - vandps Vps, Hps, Wps */
2748FNIEMOP_DEF(iemOp_vandps_Vps_Hps_Wps)
2749{
2750 IEMOP_MNEMONIC3(VEX_RVM, VANDPS, vandps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2751 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
2752 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
2753}
2754
2755
2756/** Opcode VEX.66.0F 0x54 - vandpd Vpd, Hpd, Wpd */
2757FNIEMOP_DEF(iemOp_vandpd_Vpd_Hpd_Wpd)
2758{
2759 IEMOP_MNEMONIC3(VEX_RVM, VANDPD, vandpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2760 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
2761 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
2762}
2763
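/*
 * Illustrative note: vandps and vandpd both dispatch to the integer
 * g_iemAImpl_vpand worker above - a bitwise AND is oblivious to lane
 * type, so the same 128/256-bit helper can serve the ps, pd and integer
 * forms alike.  Conceptually (hypothetical sketch):
 */
//static void iemExampleAndU128(PRTUINT128U puDst, PCRTUINT128U puSrc1, PCRTUINT128U puSrc2) /* hypothetical */
//{
//    puDst->au64[0] = puSrc1->au64[0] & puSrc2->au64[0];
//    puDst->au64[1] = puSrc1->au64[1] & puSrc2->au64[1];
//}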
2764
2765/* Opcode VEX.F3.0F 0x54 - invalid */
2766/* Opcode VEX.F2.0F 0x54 - invalid */
2767
2768
2769/** Opcode VEX.0F 0x55 - vandnps Vps, Hps, Wps */
2770FNIEMOP_DEF(iemOp_vandnps_Vps_Hps_Wps)
2771{
2772 IEMOP_MNEMONIC3(VEX_RVM, VANDNPS, vandnps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2773 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
2774 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
2775}
2776
2777
2778/** Opcode VEX.66.0F 0x55 - vandnpd Vpd, Hpd, Wpd */
2779FNIEMOP_DEF(iemOp_vandnpd_Vpd_Hpd_Wpd)
2780{
2781 IEMOP_MNEMONIC3(VEX_RVM, VANDNPD, vandnpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2782 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
2783 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
2784}
2785
2786
2787/* Opcode VEX.F3.0F 0x55 - invalid */
2788/* Opcode VEX.F2.0F 0x55 - invalid */
2789
2790/** Opcode VEX.0F 0x56 - vorps Vps, Hps, Wps */
2791FNIEMOP_DEF(iemOp_vorps_Vps_Hps_Wps)
2792{
2793 IEMOP_MNEMONIC3(VEX_RVM, VORPS, vorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2794 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
2795 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
2796}
2797
2798
2799/** Opcode VEX.66.0F 0x56 - vorpd Vpd, Hpd, Wpd */
2800FNIEMOP_DEF(iemOp_vorpd_Vpd_Hpd_Wpd)
2801{
2802 IEMOP_MNEMONIC3(VEX_RVM, VORPD, vorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2803 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
2804 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
2805}
2806
2807
2808/* Opcode VEX.F3.0F 0x56 - invalid */
2809/* Opcode VEX.F2.0F 0x56 - invalid */
2810
2811
2812/** Opcode VEX.0F 0x57 - vxorps Vps, Hps, Wps */
2813FNIEMOP_DEF(iemOp_vxorps_Vps_Hps_Wps)
2814{
2815 IEMOP_MNEMONIC3(VEX_RVM, VXORPS, vxorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
2816 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
2817 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
2818}
2819
2820
2821/** Opcode VEX.66.0F 0x57 - vxorpd Vpd, Hpd, Wpd */
2822FNIEMOP_DEF(iemOp_vxorpd_Vpd_Hpd_Wpd)
2823{
2824 IEMOP_MNEMONIC3(VEX_RVM, VXORPD, vxorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
2825 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
2826 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
2827}
2828
2829
2830/* Opcode VEX.F3.0F 0x57 - invalid */
2831/* Opcode VEX.F2.0F 0x57 - invalid */
2832
2833
2834/** Opcode VEX.0F 0x58 - vaddps Vps, Hps, Wps */
2835FNIEMOP_DEF(iemOp_vaddps_Vps_Hps_Wps)
2836{
2837 IEMOP_MNEMONIC3(VEX_RVM, VADDPS, vaddps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2838 IEMOPMEDIAF3_INIT_VARS( vaddps);
2839 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
2840}
2841
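/*
 * Illustrative sketch only: the vaddps worker selected above adds the
 * packed singles lane by lane; the real implementation additionally
 * honours MXCSR rounding/exception rules, which this hypothetical
 * rendering of the 128-bit case ignores:
 */
//static void iemExampleAddPsU128(PX86XMMREG puDst, PCX86XMMREG puSrc1, PCX86XMMREG puSrc2) /* hypothetical */
//{
//    for (unsigned i = 0; i < 4; i++)
//        puDst->ar32[i].r = puSrc1->ar32[i].r + puSrc2->ar32[i].r; /* no MXCSR handling here */
//}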
2842
2843/** Opcode VEX.66.0F 0x58 - vaddpd Vpd, Hpd, Wpd */
2844FNIEMOP_STUB(iemOp_vaddpd_Vpd_Hpd_Wpd);
2845/** Opcode VEX.F3.0F 0x58 - vaddss Vss, Hss, Wss */
2846FNIEMOP_STUB(iemOp_vaddss_Vss_Hss_Wss);
2847/** Opcode VEX.F2.0F 0x58 - vaddsd Vsd, Hsd, Wsd */
2848FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd);
2849
2850/** Opcode VEX.0F 0x59 - vmulps Vps, Hps, Wps */
2851FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps);
2852/** Opcode VEX.66.0F 0x59 - vmulpd Vpd, Hpd, Wpd */
2853FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd);
2854/** Opcode VEX.F3.0F 0x59 - vmulss Vss, Hss, Wss */
2855FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss);
2856/** Opcode VEX.F2.0F 0x59 - vmulsd Vsd, Hsd, Wsd */
2857FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd);
2858
2859/** Opcode VEX.0F 0x5a - vcvtps2pd Vpd, Wps */
2860FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
2861/** Opcode VEX.66.0F 0x5a - vcvtpd2ps Vps, Wpd */
2862FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
2863/** Opcode VEX.F3.0F 0x5a - vcvtss2sd Vsd, Hx, Wss */
2864FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
2865/** Opcode VEX.F2.0F 0x5a - vcvtsd2ss Vss, Hx, Wsd */
2866FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);
2867
2868/** Opcode VEX.0F 0x5b - vcvtdq2ps Vps, Wdq */
2869FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
2870/** Opcode VEX.66.0F 0x5b - vcvtps2dq Vdq, Wps */
2871FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
2872/** Opcode VEX.F3.0F 0x5b - vcvttps2dq Vdq, Wps */
2873FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
2874/* Opcode VEX.F2.0F 0x5b - invalid */
2875
2876/** Opcode VEX.0F 0x5c - vsubps Vps, Hps, Wps */
2877FNIEMOP_STUB(iemOp_vsubps_Vps_Hps_Wps);
2878/** Opcode VEX.66.0F 0x5c - vsubpd Vpd, Hpd, Wpd */
2879FNIEMOP_STUB(iemOp_vsubpd_Vpd_Hpd_Wpd);
2880/** Opcode VEX.F3.0F 0x5c - vsubss Vss, Hss, Wss */
2881FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss);
2882/** Opcode VEX.F2.0F 0x5c - vsubsd Vsd, Hsd, Wsd */
2883FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd);
2884
2885/** Opcode VEX.0F 0x5d - vminps Vps, Hps, Wps */
2886FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps);
2887/** Opcode VEX.66.0F 0x5d - vminpd Vpd, Hpd, Wpd */
2888FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd);
2889/** Opcode VEX.F3.0F 0x5d - vminss Vss, Hss, Wss */
2890FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss);
2891/** Opcode VEX.F2.0F 0x5d - vminsd Vsd, Hsd, Wsd */
2892FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd);
2893
2894/** Opcode VEX.0F 0x5e - vdivps Vps, Hps, Wps */
2895FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps);
2896/** Opcode VEX.66.0F 0x5e - vdivpd Vpd, Hpd, Wpd */
2897FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd);
2898/** Opcode VEX.F3.0F 0x5e - vdivss Vss, Hss, Wss */
2899FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss);
2900/** Opcode VEX.F2.0F 0x5e - vdivsd Vsd, Hsd, Wsd */
2901FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd);
2902
2903/** Opcode VEX.0F 0x5f - vmaxps Vps, Hps, Wps */
2904FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps);
2905/** Opcode VEX.66.0F 0x5f - vmaxpd Vpd, Hpd, Wpd */
2906FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd);
2907/** Opcode VEX.F3.0F 0x5f - vmaxss Vss, Hss, Wss */
2908FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss);
2909/** Opcode VEX.F2.0F 0x5f - vmaxsd Vsd, Hsd, Wsd */
2910FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd);
2911
2912
2913/* Opcode VEX.0F 0x60 - invalid */
2914
2915
2916/** Opcode VEX.66.0F 0x60 - vpunpcklbw Vx, Hx, Wx */
2917FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
2918{
2919 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLBW, vpunpcklbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2920 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklbw);
2921 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2922}
2923
2924
2925/* Opcode VEX.F3.0F 0x60 - invalid */
2926
2927
2928/* Opcode VEX.0F 0x61 - invalid */
2929
2930
2931/** Opcode VEX.66.0F 0x61 - vpunpcklwd Vx, Hx, Wx */
2932FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
2933{
2934 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLWD, vpunpcklwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2935 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklwd);
2936 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2937}
2938
2939
2940/* Opcode VEX.F3.0F 0x61 - invalid */
2941
2942
2943/* Opcode VEX.0F 0x62 - invalid */
2944
2945/** Opcode VEX.66.0F 0x62 - vpunpckldq Vx, Hx, Wx */
2946FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
2947{
2948 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLDQ, vpunpckldq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2949 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckldq);
2950 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2951}
2952
2953
2954/* Opcode VEX.F3.0F 0x62 - invalid */
2955
2956
2957
2958/* Opcode VEX.0F 0x63 - invalid */
2959
2960
2961/** Opcode VEX.66.0F 0x63 - vpacksswb Vx, Hx, Wx */
2962FNIEMOP_DEF(iemOp_vpacksswb_Vx_Hx_Wx)
2963{
2964 IEMOP_MNEMONIC3(VEX_RVM, VPACKSSWB, vpacksswb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2965 IEMOPMEDIAOPTF3_INIT_VARS( vpacksswb);
2966 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2967}
2968
2969
2970/* Opcode VEX.F3.0F 0x63 - invalid */
2971
2972/* Opcode VEX.0F 0x64 - invalid */
2973
2974
2975/** Opcode VEX.66.0F 0x64 - vpcmpgtb Vx, Hx, Wx */
2976FNIEMOP_DEF(iemOp_vpcmpgtb_Vx_Hx_Wx)
2977{
2978 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTB, vpcmpgtb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
2979 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpgtb);
2980 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2981}
2982
2983
2984/* Opcode VEX.F3.0F 0x64 - invalid */
2985
2986/* Opcode VEX.0F 0x65 - invalid */
2987
2988
2989/** Opcode VEX.66.0F 0x65 - vpcmpgtw Vx, Hx, Wx */
2990FNIEMOP_DEF(iemOp_vpcmpgtw_Vx_Hx_Wx)
2991{
2992 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTW, vpcmpgtw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
2993 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpgtw);
2994 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
2995}
2996
2997
2998/* Opcode VEX.F3.0F 0x65 - invalid */
2999
3000/* Opcode VEX.0F 0x66 - invalid */
3001
3002
3003/** Opcode VEX.66.0F 0x66 - vpcmpgtd Vx, Hx, Wx */
3004FNIEMOP_DEF(iemOp_vpcmpgtd_Vx_Hx_Wx)
3005{
3006 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTD, vpcmpgtd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3007 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpgtd);
3008 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3009}
3010
3011
3012/* Opcode VEX.F3.0F 0x66 - invalid */
3013
3014/* Opcode VEX.0F 0x67 - invalid */
3015
3016
3017/** Opcode VEX.66.0F 0x67 - vpackuswb Vx, Hx, Wx */
3018FNIEMOP_DEF(iemOp_vpackuswb_Vx_Hx_W)
3019{
3020 IEMOP_MNEMONIC3(VEX_RVM, VPACKUSWB, vpackuswb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3021 IEMOPMEDIAOPTF3_INIT_VARS( vpackuswb);
3022 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3023}
3024
3025
3026/* Opcode VEX.F3.0F 0x67 - invalid */
3027
3028
3029///**
3030// * Common worker for SSE2 instructions on the form:
3031// * pxxxx xmm1, xmm2/mem128
3032// *
3033// * The 2nd operand is the second half of a register, which in the memory case
3034// * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
3035// * where it may read the full 128 bits or only the upper 64 bits.
3036// *
3037// * Exceptions type 4.
3038// */
3039//FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
3040//{
3041// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3042// if (IEM_IS_MODRM_REG_MODE(bRm))
3043// {
3044// /*
3045// * Register, register.
3046// */
3047// IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3048// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3049// IEM_MC_ARG(PRTUINT128U, pDst, 0);
3050// IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
3051// IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3052// IEM_MC_PREPARE_SSE_USAGE();
3053// IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3054// IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3055// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3056// IEM_MC_ADVANCE_RIP_AND_FINISH();
3057// IEM_MC_END();
3058// }
3059// else
3060// {
3061// /*
3062// * Register, memory.
3063// */
3064// IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3065// IEM_MC_ARG(PRTUINT128U, pDst, 0);
3066// IEM_MC_LOCAL(RTUINT128U, uSrc);
3067// IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
3068// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3069//
3070// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3071// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
3072// IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
3073//        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword */
3074//
3075// IEM_MC_PREPARE_SSE_USAGE();
3076// IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3077// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
3078//
3079// IEM_MC_ADVANCE_RIP_AND_FINISH();
3080// IEM_MC_END();
3081// }
3082// return VINF_SUCCESS;
3083//}
3084
3085
3086/* Opcode VEX.0F 0x68 - invalid */
3087
3088/** Opcode VEX.66.0F 0x68 - vpunpckhbw Vx, Hx, Wx */
3089FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
3090{
3091 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHBW, vpunpckhbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3092 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhbw);
3093 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3094}
3095
3096
3097/* Opcode VEX.F3.0F 0x68 - invalid */
3098
3099
3100/* Opcode VEX.0F 0x69 - invalid */
3101
3102
3103/** Opcode VEX.66.0F 0x69 - vpunpckhwd Vx, Hx, Wx */
3104FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
3105{
3106 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHWD, vpunpckhwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3107 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhwd);
3108 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3109}
3110
3111
3112/* Opcode VEX.F3.0F 0x69 - invalid */
3113
3114
3115/* Opcode VEX.0F 0x6a - invalid */
3116
3117
3118/** Opcode VEX.66.0F 0x6a - vpunpckhdq Vx, Hx, Wx */
3119FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
3120{
3121 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHDQ, vpunpckhdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3122 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhdq);
3123 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3124}
3125
3126
3127/* Opcode VEX.F3.0F 0x6a - invalid */
3128
3129
3130/* Opcode VEX.0F 0x6b - invalid */
3131
3132
3133/** Opcode VEX.66.0F 0x6b - vpackssdw Vx, Hx, Wx */
3134FNIEMOP_DEF(iemOp_vpackssdw_Vx_Hx_Wx)
3135{
3136 IEMOP_MNEMONIC3(VEX_RVM, VPACKSSDW, vpackssdw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3137 IEMOPMEDIAOPTF3_INIT_VARS( vpackssdw);
3138 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3139}
3140
3141
3142/* Opcode VEX.F3.0F 0x6b - invalid */
3143
3144
3145/* Opcode VEX.0F 0x6c - invalid */
3146
3147
3148/** Opcode VEX.66.0F 0x6c - vpunpcklqdq Vx, Hx, Wx */
3149FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
3150{
3151 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLQDQ, vpunpcklqdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3152 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklqdq);
3153 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3154}
3155
3156
3157/* Opcode VEX.F3.0F 0x6c - invalid */
3158/* Opcode VEX.F2.0F 0x6c - invalid */
3159
3160
3161/* Opcode VEX.0F 0x6d - invalid */
3162
3163
3164/** Opcode VEX.66.0F 0x6d - vpunpckhqdq Vx, Hx, Wx */
3165FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
3166{
3167 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHQDQ, vpunpckhqdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3168 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhqdq);
3169 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3170}
3171
3172
3173/* Opcode VEX.F3.0F 0x6d - invalid */
3174
3175
3176/* Opcode VEX.0F 0x6e - invalid */
3177
3178FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey)
3179{
3180 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3181 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
3182 {
3183 /**
3184 * @opcode 0x6e
3185 * @opcodesub rex.w=1
3186 * @oppfx 0x66
3187 * @opcpuid avx
3188 * @opgroup og_avx_simdint_datamov
3189 * @opxcpttype 5
3190 * @optest 64-bit / op1=1 op2=2 -> op1=2
3191 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
3192 */
3193 IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
3194 if (IEM_IS_MODRM_REG_MODE(bRm))
3195 {
3196 /* XMM, greg64 */
3197 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3198 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3199 IEM_MC_LOCAL(uint64_t, u64Tmp);
3200
3201 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3202 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3203
3204 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3205 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3206
3207 IEM_MC_ADVANCE_RIP_AND_FINISH();
3208 IEM_MC_END();
3209 }
3210 else
3211 {
3212 /* XMM, [mem64] */
3213 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3214 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3215 IEM_MC_LOCAL(uint64_t, u64Tmp);
3216
3217 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3218 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3219 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3220 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3221
3222 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3223 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3224
3225 IEM_MC_ADVANCE_RIP_AND_FINISH();
3226 IEM_MC_END();
3227 }
3228 }
3229 else
3230 {
3231 /**
3232 * @opdone
3233 * @opcode 0x6e
3234 * @opcodesub rex.w=0
3235 * @oppfx 0x66
3236 * @opcpuid avx
3237 * @opgroup og_avx_simdint_datamov
3238 * @opxcpttype 5
3239 * @opfunction iemOp_vmovd_q_Vy_Ey
3240 * @optest op1=1 op2=2 -> op1=2
3241 * @optest op1=0 op2=-42 -> op1=-42
3242 */
3243 IEMOP_MNEMONIC2(VEX_RM, VMOVD, vmovd, Vd_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
3244 if (IEM_IS_MODRM_REG_MODE(bRm))
3245 {
3246 /* XMM, greg32 */
3247 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3248 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3249 IEM_MC_LOCAL(uint32_t, u32Tmp);
3250
3251 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3252 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3253
3254 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3255 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3256
3257 IEM_MC_ADVANCE_RIP_AND_FINISH();
3258 IEM_MC_END();
3259 }
3260 else
3261 {
3262 /* XMM, [mem32] */
3263 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3264 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3265 IEM_MC_LOCAL(uint32_t, u32Tmp);
3266
3267 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3268 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3269 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3270 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3271
3272 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3273 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3274
3275 IEM_MC_ADVANCE_RIP_AND_FINISH();
3276 IEM_MC_END();
3277 }
3278 }
3279}
3280
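/*
 * Illustrative sketch only: both the vmovd and vmovq paths above place
 * the GPR/memory value in xmm lane 0 and zero everything up to VLMAX,
 * per the VEX zero-extension rule.  Hypothetical 64-bit rendering,
 * assuming an RTUINT256U view of the full YMM register:
 */
//static void iemExampleMovQZxVlMax(PRTUINT256U puDst, uint64_t u64Src) /* hypothetical helper */
//{
//    puDst->au64[0] = u64Src;
//    puDst->au64[1] = 0; /* upper half of the XMM register */
//    puDst->au64[2] = 0; /* the YMM high half is zeroed too */
//    puDst->au64[3] = 0;
//}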
3281
3282/* Opcode VEX.F3.0F 0x6e - invalid */
3283
3284
3285/* Opcode VEX.0F 0x6f - invalid */
3286
3287/**
3288 * @opcode 0x6f
3289 * @oppfx 0x66
3290 * @opcpuid avx
3291 * @opgroup og_avx_simdint_datamove
3292 * @opxcpttype 1
3293 * @optest op1=1 op2=2 -> op1=2
3294 * @optest op1=0 op2=-42 -> op1=-42
3295 */
3296FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx)
3297{
3298 IEMOP_MNEMONIC2(VEX_RM, VMOVDQA, vmovdqa, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
3299 Assert(pVCpu->iem.s.uVexLength <= 1);
3300 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3301 if (IEM_IS_MODRM_REG_MODE(bRm))
3302 {
3303 /*
3304 * Register, register.
3305 */
3306 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3307 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3308
3309 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3310 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3311 if (pVCpu->iem.s.uVexLength == 0)
3312 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3313 IEM_GET_MODRM_RM(pVCpu, bRm));
3314 else
3315 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3316 IEM_GET_MODRM_RM(pVCpu, bRm));
3317 IEM_MC_ADVANCE_RIP_AND_FINISH();
3318 IEM_MC_END();
3319 }
3320 else if (pVCpu->iem.s.uVexLength == 0)
3321 {
3322 /*
3323 * Register, memory128.
3324 */
3325 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3326 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3327 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3328
3329 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3330 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3331 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3332 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3333
3334 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3335 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
3336
3337 IEM_MC_ADVANCE_RIP_AND_FINISH();
3338 IEM_MC_END();
3339 }
3340 else
3341 {
3342 /*
3343 * Register, memory256.
3344 */
3345 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3346 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
3347 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3348
3349 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3350 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3351 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3352 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3353
3354 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3355 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
3356
3357 IEM_MC_ADVANCE_RIP_AND_FINISH();
3358 IEM_MC_END();
3359 }
3360}
3361
3362/**
3363 * @opcode 0x6f
3364 * @oppfx 0xf3
3365 * @opcpuid avx
3366 * @opgroup og_avx_simdint_datamove
3367 * @opxcpttype 4UA
3368 * @optest op1=1 op2=2 -> op1=2
3369 * @optest op1=0 op2=-42 -> op1=-42
3370 */
3371FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx)
3372{
3373 IEMOP_MNEMONIC2(VEX_RM, VMOVDQU, vmovdqu, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
3374 Assert(pVCpu->iem.s.uVexLength <= 1);
3375 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3376 if (IEM_IS_MODRM_REG_MODE(bRm))
3377 {
3378 /*
3379 * Register, register.
3380 */
3381 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3382 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3383
3384 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3385 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3386 if (pVCpu->iem.s.uVexLength == 0)
3387 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3388 IEM_GET_MODRM_RM(pVCpu, bRm));
3389 else
3390 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
3391 IEM_GET_MODRM_RM(pVCpu, bRm));
3392 IEM_MC_ADVANCE_RIP_AND_FINISH();
3393 IEM_MC_END();
3394 }
3395 else if (pVCpu->iem.s.uVexLength == 0)
3396 {
3397 /*
3398 * Register, memory128.
3399 */
3400 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3401 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
3402 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3403
3404 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3405 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3406 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3407 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3408
3409 IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3410 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
3411
3412 IEM_MC_ADVANCE_RIP_AND_FINISH();
3413 IEM_MC_END();
3414 }
3415 else
3416 {
3417 /*
3418 * Register, memory256.
3419 */
3420 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3421 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
3422 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3423
3424 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3425 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3426 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3427 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3428
3429 IEM_MC_FETCH_MEM_U256_NO_AC(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3430 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
3431
3432 IEM_MC_ADVANCE_RIP_AND_FINISH();
3433 IEM_MC_END();
3434 }
3435}
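/*
 * For reference: a minimal model of the VLMAX zero-extension rule that the
 * IEM_MC_STORE_YREG_U*_ZX_VLMAX statements above implement. A VEX.128 load
 * writes bits 127:0 and zeroes bits 255:128; a VEX.256 load writes all 256
 * bits. Hypothetical helper, not referenced by the decoder.
 */
#if 0 /* illustrative sketch only */
static void vmovdquLoadRefSketch(uint8_t abYmmDst[32], const uint8_t *pbSrc, bool fVexL256)
{
    unsigned const cbOp = fVexL256 ? 32 : 16;
    for (unsigned i = 0; i < 32; i++)
        abYmmDst[i] = i < cbOp ? pbSrc[i] : 0; /* copy the operand, zero up to VLMAX */
}
#endif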
3436
3437
3438/* Opcode VEX.0F 0x70 - invalid */
3439
3440
3441/**
3442 * Common worker for AVX/AVX2 instructions on the forms:
3443 * - vpxxx xmm0, xmm2/mem128, imm8
3444 * - vpxxx ymm0, ymm2/mem256, imm8
3445 *
3446 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
3447 */
3448FNIEMOP_DEF_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnU128, PFNIEMAIMPLMEDIAPSHUFU256, pfnU256)
3449{
3450 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3451 if (IEM_IS_MODRM_REG_MODE(bRm))
3452 {
3453 /*
3454 * Register, register.
3455 */
3456 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
3457 if (pVCpu->iem.s.uVexLength)
3458 {
3459 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3460 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
3461 IEM_MC_LOCAL(RTUINT256U, uDst);
3462 IEM_MC_LOCAL(RTUINT256U, uSrc);
3463 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
3464 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3465 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3466 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3467 IEM_MC_PREPARE_AVX_USAGE();
3468 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3469 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
3470 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3471 IEM_MC_ADVANCE_RIP_AND_FINISH();
3472 IEM_MC_END();
3473 }
3474 else
3475 {
3476 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3477 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3478 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3479 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
3480 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3481 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3482 IEM_MC_PREPARE_AVX_USAGE();
3483 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3484 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3485 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
3486 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
3487 IEM_MC_ADVANCE_RIP_AND_FINISH();
3488 IEM_MC_END();
3489 }
3490 }
3491 else
3492 {
3493 /*
3494 * Register, memory.
3495 */
3496 if (pVCpu->iem.s.uVexLength)
3497 {
3498 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3499 IEM_MC_LOCAL(RTUINT256U, uDst);
3500 IEM_MC_LOCAL(RTUINT256U, uSrc);
3501 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3502 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
3503 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3504
3505 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3506 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
3507 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
3508 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3509 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3510 IEM_MC_PREPARE_AVX_USAGE();
3511
3512 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3513 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
3514 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3515
3516 IEM_MC_ADVANCE_RIP_AND_FINISH();
3517 IEM_MC_END();
3518 }
3519 else
3520 {
3521 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3522 IEM_MC_LOCAL(RTUINT128U, uSrc);
3523 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3524 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3525 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
3526
3527 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
3528 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
3529 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3530 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3531 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3532 IEM_MC_PREPARE_AVX_USAGE();
3533
3534 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3535 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
3536 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
3537 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
3538
3539 IEM_MC_ADVANCE_RIP_AND_FINISH();
3540 IEM_MC_END();
3541 }
3542 }
3543}
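/*
 * For reference: the selection logic behind the pfnU128/pfnU256 workers when
 * used for vpshufd - each two bits of the immediate pick one source dword.
 * Minimal sketch of one 128-bit lane; the helper name is hypothetical.
 */
#if 0 /* illustrative sketch only */
static void pshufdRefSketch(uint32_t auDst[4], const uint32_t auSrc[4], uint8_t bImm)
{
    for (unsigned i = 0; i < 4; i++)
        auDst[i] = auSrc[(bImm >> (i * 2)) & 3]; /* bits 2i+1:2i select dword i */
}
#endif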
3544
3545
3546/** Opcode VEX.66.0F 0x70 - vpshufd Vx, Wx, Ib */
3547FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
3548{
3549 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFD, vpshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3550 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128,
3551 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufd_u256, iemAImpl_vpshufd_u256_fallback));
3553}
3554
3555
3556/** Opcode VEX.F3.0F 0x70 - vpshufhw Vx, Wx, Ib */
3557FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
3558{
3559 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFHW, vpshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3560 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128,
3561 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufhw_u256, iemAImpl_vpshufhw_u256_fallback));
3563}
3564
3565
3566/** Opcode VEX.F2.0F 0x70 - vpshuflw Vx, Wx, Ib */
3567FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
3568{
3569 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFLW, vpshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3570 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128,
3571 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshuflw_u256, iemAImpl_vpshuflw_u256_fallback));
3572}
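/*
 * For reference: vpshuflw shuffles only the low four words of each 128-bit
 * lane and copies the high four unchanged (vpshufhw is the mirror image).
 * Minimal single-lane sketch; the helper name is hypothetical.
 */
#if 0 /* illustrative sketch only */
static void pshuflwRefSketch(uint16_t auDst[8], const uint16_t auSrc[8], uint8_t bImm)
{
    for (unsigned i = 0; i < 4; i++)
        auDst[i] = auSrc[(bImm >> (i * 2)) & 3]; /* low words: imm8 selected */
    for (unsigned i = 4; i < 8; i++)
        auDst[i] = auSrc[i];                     /* high words: copied as-is */
}
#endif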
3573
3574
3575/**
3576 * Common worker(s) for AVX/AVX2 instructions on the forms:
3577 * - vpxxx xmm0, xmm2, imm8
3578 * - vpxxx ymm0, ymm2, imm8
3579 *
3580 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
3581 */
3582FNIEMOP_DEF_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHUFU128, pfnU128)
3583{
3584 if (IEM_IS_MODRM_REG_MODE(bRm))
3585 {
3586 /*
3587 * Register, register.
3588 */
3589 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
3590 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3591 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
3592 IEM_MC_ARG(PRTUINT128U, puDst, 0);
3593 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
3594 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3595 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3596 IEM_MC_PREPARE_AVX_USAGE();
3597 IEM_MC_REF_XREG_U128(puDst, IEM_GET_EFFECTIVE_VVVV(pVCpu));
3598 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3599 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
3600 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_EFFECTIVE_VVVV(pVCpu));
3601 IEM_MC_ADVANCE_RIP_AND_FINISH();
3602 IEM_MC_END();
3603 }
3604 /* No memory operand. */
3605 else
3606 IEMOP_RAISE_INVALID_OPCODE_RET();
3607}
3608
3609FNIEMOP_DEF_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHUFU256, pfnU256)
3610{
3611 if (IEM_IS_MODRM_REG_MODE(bRm))
3612 {
3613 /*
3614 * Register, register.
3615 */
3616 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
3617 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3618 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
3619 IEM_MC_LOCAL(RTUINT256U, uDst);
3620 IEM_MC_LOCAL(RTUINT256U, uSrc);
3621 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
3622 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3623 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
3624 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3625 IEM_MC_PREPARE_AVX_USAGE();
3626 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3627 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
3628 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_EFFECTIVE_VVVV(pVCpu), uDst);
3629 IEM_MC_ADVANCE_RIP_AND_FINISH();
3630 IEM_MC_END();
3631 }
3632 /* No memory operand. */
3633 else
3634 IEMOP_RAISE_INVALID_OPCODE_RET();
3635}
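/*
 * For reference: the shift-by-immediate forms dispatched through these two
 * workers (VEX groups 12/13/14) write the register selected by VEX.vvvv (Hx)
 * and read the one selected by ModRM.rm (Ux). Per-element model for vpsrlw,
 * assuming a single 128-bit lane; the helper name is hypothetical.
 */
#if 0 /* illustrative sketch only */
static void psrlwImmRefSketch(uint16_t auDst[8], const uint16_t auSrc[8], uint8_t bImm)
{
    for (unsigned i = 0; i < 8; i++)
        auDst[i] = bImm > 15 ? 0 : (uint16_t)(auSrc[i] >> bImm); /* counts > 15 zero the element */
}
#endif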
3636
3637
3638/* Opcode VEX.0F 0x71 11/2 - invalid. */
3639/** Opcode VEX.66.0F 0x71 11/2. */
3640FNIEMOP_DEF_1(iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, uint8_t, bRm)
3641{
3642 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLW, vpsrlw, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3643 if (pVCpu->iem.s.uVexLength)
3644 {
3645 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3646 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlw_imm_u256, iemAImpl_vpsrlw_imm_u256_fallback));
3647 }
3648 else
3649 {
3650 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3651 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlw_imm_u128, iemAImpl_vpsrlw_imm_u128_fallback));
3652 }
3653}
3654
3655
3656/* Opcode VEX.0F 0x71 11/4 - invalid */
3657/** Opcode VEX.66.0F 0x71 11/4. */
3658FNIEMOP_DEF_1(iemOp_VGrp12_vpsraw_Hx_Ux_Ib, uint8_t, bRm)
3659{
3660 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRAW, vpsraw, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3661 if (pVCpu->iem.s.uVexLength)
3662 {
3663 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3664 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsraw_imm_u256, iemAImpl_vpsraw_imm_u256_fallback));
3665 }
3666 else
3667 {
3668 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3669 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsraw_imm_u128, iemAImpl_vpsraw_imm_u128_fallback));
3670 }
3671}
3672
3673/* Opcode VEX.0F 0x71 11/6 - invalid */
3674
3675/** Opcode VEX.66.0F 0x71 11/6. */
3676FNIEMOP_DEF_1(iemOp_VGrp12_vpsllw_Hx_Ux_Ib, uint8_t, bRm)
3677{
3678 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLW, vpsllw, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3679 if (pVCpu->iem.s.uVexLength)
3680 {
3681 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3682 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllw_imm_u256, iemAImpl_vpsllw_imm_u256_fallback));
3683 }
3684 else
3685 {
3686 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3687 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllw_imm_u128, iemAImpl_vpsllw_imm_u128_fallback));
3688 }
3689}
3690
3691
3692/**
3693 * VEX Group 12 jump table for register variant.
3694 */
3695IEM_STATIC const PFNIEMOPRM g_apfnVexGroup12RegReg[] =
3696{
3697 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3698 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3699 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3700 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3701 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsraw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3702 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3703 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsllw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3704 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3705};
3706AssertCompile(RT_ELEMENTS(g_apfnVexGroup12RegReg) == 8*4);
3707
3708
3709/** Opcode VEX.0F 0x71. */
3710FNIEMOP_DEF(iemOp_VGrp12)
3711{
3712 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3713 if (IEM_IS_MODRM_REG_MODE(bRm))
3714 /* register, register */
3715 return FNIEMOP_CALL_1(g_apfnVexGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
3716 + pVCpu->iem.s.idxPrefix], bRm);
3717 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3718}
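/*
 * For reference: how the group jump tables above are indexed - four entries
 * per /r value, one per SIMD prefix (none, 066h, 0f3h, 0f2h), assuming
 * idxPrefix encodes those as 0..3. Hypothetical helper, not used here.
 */
#if 0 /* illustrative sketch only */
static unsigned vexGroupTableIndexSketch(uint8_t bRm, uint8_t idxPrefix)
{
    /* e.g. VEX.66.0F 71 /2 (vpsrlw): reg=2, prefix 066h=1 -> entry 2*4 + 1 */
    return ((bRm >> 3) & 7) * 4 + idxPrefix;
}
#endif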
3719
3720
3721/* Opcode VEX.0F 0x72 11/2 - invalid. */
3722/** Opcode VEX.66.0F 0x72 11/2. */
3723FNIEMOP_DEF_1(iemOp_VGrp13_vpsrld_Hx_Ux_Ib, uint8_t, bRm)
3724{
3725 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLD, vpsrld, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3726 if (pVCpu->iem.s.uVexLength)
3727 {
3728 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3729 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrld_imm_u256, iemAImpl_vpsrld_imm_u256_fallback));
3730 }
3731 else
3732 {
3733 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3734 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrld_imm_u128, iemAImpl_vpsrld_imm_u128_fallback));
3735 }
3736}
3737
3738
3739/* Opcode VEX.0F 0x72 11/4 - invalid. */
3740/** Opcode VEX.66.0F 0x72 11/4. */
3741FNIEMOP_DEF_1(iemOp_VGrp13_vpsrad_Hx_Ux_Ib, uint8_t, bRm)
3742{
3743 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRAD, vpsrad, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3744 if (pVCpu->iem.s.uVexLength)
3745 {
3746 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3747 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrad_imm_u256, iemAImpl_vpsrad_imm_u256_fallback));
3748 }
3749 else
3750 {
3751 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3752 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrad_imm_u128, iemAImpl_vpsrad_imm_u128_fallback));
3753 }
3754}
3755
3756/* Opcode VEX.0F 0x72 11/6 - invalid. */
3757
3758/** Opcode VEX.66.0F 0x72 11/6. */
3759FNIEMOP_DEF_1(iemOp_VGrp13_vpslld_Hx_Ux_Ib, uint8_t, bRm)
3760{
3761 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLD, vpslld, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3762 if (pVCpu->iem.s.uVexLength)
3763 {
3764 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3765 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslld_imm_u256, iemAImpl_vpslld_imm_u256_fallback));
3766 }
3767 else
3768 {
3769 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3770 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslld_imm_u128, iemAImpl_vpslld_imm_u128_fallback));
3771 }
3772}
3773
3774
3775/**
3776 * VEX Group 13 jump table for register variant.
3777 */
3778IEM_STATIC const PFNIEMOPRM g_apfnVexGroup13RegReg[] =
3779{
3780 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3781 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3782 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3783 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3784 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrad_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3785 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3786 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpslld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3787 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
3788};
3789AssertCompile(RT_ELEMENTS(g_apfnVexGroup13RegReg) == 8*4);
3790
3791/** Opcode VEX.0F 0x72. */
3792FNIEMOP_DEF(iemOp_VGrp13)
3793{
3794 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3795 if (IEM_IS_MODRM_REG_MODE(bRm))
3796 /* register, register */
3797 return FNIEMOP_CALL_1(g_apfnVexGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
3798 + pVCpu->iem.s.idxPrefix], bRm);
3799 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3800}
3801
3802
3803/* Opcode VEX.0F 0x73 11/2 - invalid. */
3804/** Opcode VEX.66.0F 0x73 11/2. */
3805FNIEMOP_DEF_1(iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, uint8_t, bRm)
3806{
3807 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLQ, vpsrlq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3808 if (pVCpu->iem.s.uVexLength)
3809 {
3810 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3811 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlq_imm_u256, iemAImpl_vpsrlq_imm_u256_fallback));
3812 }
3813 else
3814 {
3815 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3816 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlq_imm_u128, iemAImpl_vpsrlq_imm_u128_fallback));
3817 }
3818}
3819
3820
3821/** Opcode VEX.66.0F 0x73 11/3. */
3822FNIEMOP_DEF_1(iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, uint8_t, bRm)
3823{
3824 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLDQ, vpsrldq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3825 if (pVCpu->iem.s.uVexLength)
3826 {
3827 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3828 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrldq_imm_u256, iemAImpl_vpsrldq_imm_u256_fallback));
3829 }
3830 else
3831 {
3832 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3833 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrldq_imm_u128, iemAImpl_vpsrldq_imm_u128_fallback));
3834 }
3835}
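/*
 * For reference: unlike the element shifts, vpsrldq shifts whole bytes out of
 * each 128-bit lane; immediates above 15 clear the lane. Single-lane sketch
 * with a hypothetical helper name.
 */
#if 0 /* illustrative sketch only */
static void psrldqRefSketch(uint8_t abDst[16], const uint8_t abSrc[16], uint8_t bImm)
{
    unsigned const cbShift = bImm > 15 ? 16 : bImm;
    for (unsigned i = 0; i < 16; i++)
        abDst[i] = i + cbShift < 16 ? abSrc[i + cbShift] : 0; /* byte-granular right shift */
}
#endif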
3836
3837/* Opcode VEX.0F 0x73 11/6 - invalid. */
3838
3839/** Opcode VEX.66.0F 0x73 11/6. */
3840FNIEMOP_DEF_1(iemOp_VGrp14_vpsllq_Hx_Ux_Ib, uint8_t, bRm)
3841{
3842 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLQ, vpsllq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3843 if (pVCpu->iem.s.uVexLength)
3844 {
3845 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3846 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllq_imm_u256, iemAImpl_vpsllq_imm_u256_fallback));
3847 }
3848 else
3849 {
3850 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3851 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllq_imm_u128, iemAImpl_vpsllq_imm_u128_fallback));
3852 }
3853}
3854
3855/** Opcode VEX.66.0F 0x73 11/7. */
3856FNIEMOP_DEF_1(iemOp_VGrp14_vpslldq_Hx_Ux_Ib, uint8_t, bRm)
3857{
3858 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLDQ, vpslldq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3859 if (pVCpu->iem.s.uVexLength)
3860 {
3861 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
3862 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslldq_imm_u256, iemAImpl_vpslldq_imm_u256_fallback));
3863 }
3864 else
3865 {
3866 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
3867 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslldq_imm_u128, iemAImpl_vpslldq_imm_u128_fallback));
3868 }
3869}
3870
3871/* Opcode VEX.0F 0x73 11/7 - invalid. */
3872
3873/**
3874 * VEX Group 14 jump table for register variant.
3875 */
3876IEM_STATIC const PFNIEMOPRM g_apfnVexGroup14RegReg[] =
3877{
3878 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3879 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3880 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3881 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3882 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3883 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
3884 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsllq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3885 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpslldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
3886};
3887AssertCompile(RT_ELEMENTS(g_apfnVexGroup14RegReg) == 8*4);
3888
3889
3890/** Opcode VEX.0F 0x73. */
3891FNIEMOP_DEF(iemOp_VGrp14)
3892{
3893 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3894 if (IEM_IS_MODRM_REG_MODE(bRm))
3895 /* register, register */
3896 return FNIEMOP_CALL_1(g_apfnVexGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
3897 + pVCpu->iem.s.idxPrefix], bRm);
3898 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
3899}
3900
3901
3902/* Opcode VEX.0F 0x74 - invalid */
3903
3904
3905/** Opcode VEX.66.0F 0x74 - vpcmpeqb Vx, Hx, Wx */
3906FNIEMOP_DEF(iemOp_vpcmpeqb_Vx_Hx_Wx)
3907{
3908 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQB, vpcmpeqb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3909 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpeqb);
3910 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3911}
3912
3913/* Opcode VEX.F3.0F 0x74 - invalid */
3914/* Opcode VEX.F2.0F 0x74 - invalid */
3915
3916
3917/* Opcode VEX.0F 0x75 - invalid */
3918
3919
3920/** Opcode VEX.66.0F 0x75 - vpcmpeqw Vx, Hx, Wx */
3921FNIEMOP_DEF(iemOp_vpcmpeqw_Vx_Hx_Wx)
3922{
3923 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQW, vpcmpeqw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3924 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpeqw);
3925 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3926}
3927
3928
3929/* Opcode VEX.F3.0F 0x75 - invalid */
3930/* Opcode VEX.F2.0F 0x75 - invalid */
3931
3932
3933/* Opcode VEX.0F 0x76 - invalid */
3934
3935
3936/** Opcode VEX.66.0F 0x76 - vpcmpeqd Vx, Hx, Wx */
3937FNIEMOP_DEF(iemOp_vpcmpeqd_Vx_Hx_Wx)
3938{
3939 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQD, vpcmpeqd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
3940 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpeqd);
3941 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
3942}
3943
3944
3945/* Opcode VEX.F3.0F 0x76 - invalid */
3946/* Opcode VEX.F2.0F 0x76 - invalid */
3947
3948
3949/** Opcode VEX.0F 0x77 - vzeroupper (VEX.L=0) / vzeroall (VEX.L=1) */
3950FNIEMOP_DEF(iemOp_vzeroupperv__vzeroallv)
3951{
3952 Assert(pVCpu->iem.s.uVexLength <= 1);
3953 if (pVCpu->iem.s.uVexLength == 0)
3954 {
3955 /*
3956 * 128-bit: vzeroupper
3957 */
3958 IEMOP_MNEMONIC(vzeroupper, "vzeroupper");
3959 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3960
3961 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3962 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3963 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
3964
3965 IEM_MC_CLEAR_YREG_128_UP(0);
3966 IEM_MC_CLEAR_YREG_128_UP(1);
3967 IEM_MC_CLEAR_YREG_128_UP(2);
3968 IEM_MC_CLEAR_YREG_128_UP(3);
3969 IEM_MC_CLEAR_YREG_128_UP(4);
3970 IEM_MC_CLEAR_YREG_128_UP(5);
3971 IEM_MC_CLEAR_YREG_128_UP(6);
3972 IEM_MC_CLEAR_YREG_128_UP(7);
3973
3974 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
3975 {
3976 IEM_MC_CLEAR_YREG_128_UP( 8);
3977 IEM_MC_CLEAR_YREG_128_UP( 9);
3978 IEM_MC_CLEAR_YREG_128_UP(10);
3979 IEM_MC_CLEAR_YREG_128_UP(11);
3980 IEM_MC_CLEAR_YREG_128_UP(12);
3981 IEM_MC_CLEAR_YREG_128_UP(13);
3982 IEM_MC_CLEAR_YREG_128_UP(14);
3983 IEM_MC_CLEAR_YREG_128_UP(15);
3984 }
3985
3986 IEM_MC_ADVANCE_RIP_AND_FINISH();
3987 IEM_MC_END();
3988 }
3989 else
3990 {
3991 /*
3992 * 256-bit: vzeroall
3993 */
3994 IEMOP_MNEMONIC(vzeroall, "vzeroall");
3995 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3996
3997 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3998 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3999 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4000
4001 IEM_MC_LOCAL_CONST(uint32_t, uZero, 0);
4002 IEM_MC_STORE_YREG_U32_ZX_VLMAX(0, uZero);
4003 IEM_MC_STORE_YREG_U32_ZX_VLMAX(1, uZero);
4004 IEM_MC_STORE_YREG_U32_ZX_VLMAX(2, uZero);
4005 IEM_MC_STORE_YREG_U32_ZX_VLMAX(3, uZero);
4006 IEM_MC_STORE_YREG_U32_ZX_VLMAX(4, uZero);
4007 IEM_MC_STORE_YREG_U32_ZX_VLMAX(5, uZero);
4008 IEM_MC_STORE_YREG_U32_ZX_VLMAX(6, uZero);
4009 IEM_MC_STORE_YREG_U32_ZX_VLMAX(7, uZero);
4010
4011 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
4012 {
4013 IEM_MC_STORE_YREG_U32_ZX_VLMAX( 8, uZero);
4014 IEM_MC_STORE_YREG_U32_ZX_VLMAX( 9, uZero);
4015 IEM_MC_STORE_YREG_U32_ZX_VLMAX(10, uZero);
4016 IEM_MC_STORE_YREG_U32_ZX_VLMAX(11, uZero);
4017 IEM_MC_STORE_YREG_U32_ZX_VLMAX(12, uZero);
4018 IEM_MC_STORE_YREG_U32_ZX_VLMAX(13, uZero);
4019 IEM_MC_STORE_YREG_U32_ZX_VLMAX(14, uZero);
4020 IEM_MC_STORE_YREG_U32_ZX_VLMAX(15, uZero);
4021 }
4022
4023 IEM_MC_ADVANCE_RIP_AND_FINISH();
4024 IEM_MC_END();
4025 }
4026}
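/*
 * For reference: the two paths above differ only in how much of each register
 * they clear - vzeroupper zeroes bits 255:128, vzeroall the whole register;
 * outside 64-bit mode only YMM0-YMM7 are touched. Minimal model with a
 * hypothetical helper name.
 */
#if 0 /* illustrative sketch only */
static void vzeroRefSketch(uint8_t aabYmm[16][32], unsigned cRegs, bool fZeroAll)
{
    for (unsigned iReg = 0; iReg < cRegs; iReg++)            /* cRegs: 8 or 16 */
        for (unsigned offByte = fZeroAll ? 0 : 16; offByte < 32; offByte++)
            aabYmm[iReg][offByte] = 0;
}
#endif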
4027
4028
4029/* Opcode VEX.66.0F 0x77 - invalid */
4030/* Opcode VEX.F3.0F 0x77 - invalid */
4031/* Opcode VEX.F2.0F 0x77 - invalid */
4032
4033/* Opcode VEX.0F 0x78 - invalid */
4034/* Opcode VEX.66.0F 0x78 - invalid */
4035/* Opcode VEX.F3.0F 0x78 - invalid */
4036/* Opcode VEX.F2.0F 0x78 - invalid */
4037
4038/* Opcode VEX.0F 0x79 - invalid */
4039/* Opcode VEX.66.0F 0x79 - invalid */
4040/* Opcode VEX.F3.0F 0x79 - invalid */
4041/* Opcode VEX.F2.0F 0x79 - invalid */
4042
4043/* Opcode VEX.0F 0x7a - invalid */
4044/* Opcode VEX.66.0F 0x7a - invalid */
4045/* Opcode VEX.F3.0F 0x7a - invalid */
4046/* Opcode VEX.F2.0F 0x7a - invalid */
4047
4048/* Opcode VEX.0F 0x7b - invalid */
4049/* Opcode VEX.66.0F 0x7b - invalid */
4050/* Opcode VEX.F3.0F 0x7b - invalid */
4051/* Opcode VEX.F2.0F 0x7b - invalid */
4052
4053/* Opcode VEX.0F 0x7c - invalid */
4054/** Opcode VEX.66.0F 0x7c - vhaddpd Vpd, Hpd, Wpd */
4055FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd);
4056/* Opcode VEX.F3.0F 0x7c - invalid */
4057/** Opcode VEX.F2.0F 0x7c - vhaddps Vps, Hps, Wps */
4058FNIEMOP_STUB(iemOp_vhaddps_Vps_Hps_Wps);
4059
4060/* Opcode VEX.0F 0x7d - invalid */
4061/** Opcode VEX.66.0F 0x7d - vhsubpd Vpd, Hpd, Wpd */
4062FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd);
4063/* Opcode VEX.F3.0F 0x7d - invalid */
4064/** Opcode VEX.F2.0F 0x7d - vhsubps Vps, Hps, Wps */
4065FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps);
4066
4067
4068/* Opcode VEX.0F 0x7e - invalid */
4069
4070FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy)
4071{
4072 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4073 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4074 {
4075 /**
4076 * @opcode 0x7e
4077 * @opcodesub rex.w=1
4078 * @oppfx 0x66
4079 * @opcpuid avx
4080 * @opgroup og_avx_simdint_datamove
4081 * @opxcpttype 5
4082 * @optest 64-bit / op1=1 op2=2 -> op1=2
4083 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
4084 */
4085 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
4086 if (IEM_IS_MODRM_REG_MODE(bRm))
4087 {
4088 /* greg64, XMM */
4089 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4090 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4091 IEM_MC_LOCAL(uint64_t, u64Tmp);
4092
4093 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4094 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4095
4096 IEM_MC_FETCH_YREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4097 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
4098
4099 IEM_MC_ADVANCE_RIP_AND_FINISH();
4100 IEM_MC_END();
4101 }
4102 else
4103 {
4104 /* [mem64], XMM */
4105 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
4106 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4107 IEM_MC_LOCAL(uint64_t, u64Tmp);
4108
4109 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4110 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4111 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4112 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4113
4114 IEM_MC_FETCH_YREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4115 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
4116
4117 IEM_MC_ADVANCE_RIP_AND_FINISH();
4118 IEM_MC_END();
4119 }
4120 }
4121 else
4122 {
4123 /**
4124 * @opdone
4125 * @opcode 0x7e
4126 * @opcodesub rex.w=0
4127 * @oppfx 0x66
4128 * @opcpuid avx
4129 * @opgroup og_avx_simdint_datamove
4130 * @opxcpttype 5
4131 * @opfunction iemOp_vmovd_q_Ey_Vy
4132 * @optest op1=1 op2=2 -> op1=2
4133 * @optest op1=0 op2=-42 -> op1=-42
4134 */
4135 IEMOP_MNEMONIC2(VEX_MR, VMOVD, vmovd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
4136 if (IEM_IS_MODRM_REG_MODE(bRm))
4137 {
4138 /* greg32, XMM */
4139 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4140 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4141 IEM_MC_LOCAL(uint32_t, u32Tmp);
4142
4143 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4144 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4145
4146 IEM_MC_FETCH_YREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4147 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
4148
4149 IEM_MC_ADVANCE_RIP_AND_FINISH();
4150 IEM_MC_END();
4151 }
4152 else
4153 {
4154 /* [mem32], XMM */
4155 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4156 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4157 IEM_MC_LOCAL(uint32_t, u32Tmp);
4158
4159 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4160 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4161 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4162 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4163
4164 IEM_MC_FETCH_YREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4165 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
4166
4167 IEM_MC_ADVANCE_RIP_AND_FINISH();
4168 IEM_MC_END();
4169 }
4170 }
4171}
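/*
 * For reference: VEX.W is the only thing separating the two encodings above -
 * W=1 stores the low qword (vmovq), W=0 the low dword (vmovd) of the source
 * XMM register. Minimal model; the helper name is hypothetical.
 */
#if 0 /* illustrative sketch only */
static uint64_t vmovdqStoreRefSketch(const uint8_t abXmmSrc[16], bool fVexW)
{
    uint64_t uVal = 0;
    for (unsigned i = 0; i < (fVexW ? 8U : 4U); i++)
        uVal |= (uint64_t)abXmmSrc[i] << (i * 8); /* little-endian low dword/qword */
    return uVal;
}
#endif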
4172
4173
4174/**
4175 * @opcode 0x7e
4176 * @oppfx 0xf3
4177 * @opcpuid avx
4178 * @opgroup og_avx_pcksclr_datamove
4179 * @opxcpttype none
4180 * @optest op1=1 op2=2 -> op1=2
4181 * @optest op1=0 op2=-42 -> op1=-42
4182 */
4183FNIEMOP_DEF(iemOp_vmovq_Vq_Wq)
4184{
4185 IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
4186 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4187 if (IEM_IS_MODRM_REG_MODE(bRm))
4188 {
4189 /*
4190 * Register, register.
4191 */
4192 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4193 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4194
4195 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4196 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4197
4198 IEM_MC_COPY_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
4199 IEM_GET_MODRM_RM(pVCpu, bRm));
4200 IEM_MC_ADVANCE_RIP_AND_FINISH();
4201 IEM_MC_END();
4202 }
4203 else
4204 {
4205 /*
4206 * Memory, register.
4207 */
4208 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4209 IEM_MC_LOCAL(uint64_t, uSrc);
4210 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4211
4212 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4213 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4214 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4215 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4216
4217 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4218 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
4219
4220 IEM_MC_ADVANCE_RIP_AND_FINISH();
4221 IEM_MC_END();
4222 }
4224}
4225/* Opcode VEX.F2.0F 0x7e - invalid */
4226
4227
4228/* Opcode VEX.0F 0x7f - invalid */
4229
4230/**
4231 * @opcode 0x7f
4232 * @oppfx 0x66
4233 * @opcpuid avx
4234 * @opgroup og_avx_simdint_datamove
4235 * @opxcpttype 1
4236 * @optest op1=1 op2=2 -> op1=2
4237 * @optest op1=0 op2=-42 -> op1=-42
4238 */
4239FNIEMOP_DEF(iemOp_vmovdqa_Wx_Vx)
4240{
4241 IEMOP_MNEMONIC2(VEX_MR, VMOVDQA, vmovdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
4242 Assert(pVCpu->iem.s.uVexLength <= 1);
4243 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4244 if (IEM_IS_MODRM_REG_MODE(bRm))
4245 {
4246 /*
4247 * Register, register.
4248 */
4249 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4250 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4251
4252 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4253 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4254 if (pVCpu->iem.s.uVexLength == 0)
4255 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4256 IEM_GET_MODRM_REG(pVCpu, bRm));
4257 else
4258 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4259 IEM_GET_MODRM_REG(pVCpu, bRm));
4260 IEM_MC_ADVANCE_RIP_AND_FINISH();
4261 IEM_MC_END();
4262 }
4263 else if (pVCpu->iem.s.uVexLength == 0)
4264 {
4265 /*
4266 * Register, memory128.
4267 */
4268 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4269 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4270 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4271
4272 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4273 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4274 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4275 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4276
4277 IEM_MC_FETCH_YREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
4278 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4279
4280 IEM_MC_ADVANCE_RIP_AND_FINISH();
4281 IEM_MC_END();
4282 }
4283 else
4284 {
4285 /*
4286 * Register, memory256.
4287 */
4288 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4289 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
4290 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4291
4292 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4293 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4294 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4295 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4296
4297 IEM_MC_FETCH_YREG_U256(u256Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4298 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
4299
4300 IEM_MC_ADVANCE_RIP_AND_FINISH();
4301 IEM_MC_END();
4302 }
4303}
4304
4305
4306/**
4307 * @opcode 0x7f
4308 * @oppfx 0xf3
4309 * @opcpuid avx
4310 * @opgroup og_avx_simdint_datamove
4311 * @opxcpttype 4UA
4312 * @optest op1=1 op2=2 -> op1=2
4313 * @optest op1=0 op2=-42 -> op1=-42
4314 */
4315FNIEMOP_DEF(iemOp_vmovdqu_Wx_Vx)
4316{
4317 IEMOP_MNEMONIC2(VEX_MR, VMOVDQU, vmovdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
4318 Assert(pVCpu->iem.s.uVexLength <= 1);
4319 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4320 if (IEM_IS_MODRM_REG_MODE(bRm))
4321 {
4322 /*
4323 * Register, register.
4324 */
4325 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4326 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4327
4328 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4329 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4330 if (pVCpu->iem.s.uVexLength == 0)
4331 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4332 IEM_GET_MODRM_REG(pVCpu, bRm));
4333 else
4334 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4335 IEM_GET_MODRM_REG(pVCpu, bRm));
4336 IEM_MC_ADVANCE_RIP_AND_FINISH();
4337 IEM_MC_END();
4338 }
4339 else if (pVCpu->iem.s.uVexLength == 0)
4340 {
4341 /*
4342 * Register, memory128.
4343 */
4344 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4345 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4346 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4347
4348 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4349 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4350 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4351 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4352
4353 IEM_MC_FETCH_YREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
4354 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
4355
4356 IEM_MC_ADVANCE_RIP_AND_FINISH();
4357 IEM_MC_END();
4358 }
4359 else
4360 {
4361 /*
4362 * Register, memory256.
4363 */
4364 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4365 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
4366 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4367
4368 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4369 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4370 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4371 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4372
4373 IEM_MC_FETCH_YREG_U256(u256Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
4374 IEM_MC_STORE_MEM_U256_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
4375
4376 IEM_MC_ADVANCE_RIP_AND_FINISH();
4377 IEM_MC_END();
4378 }
4379}
4380
4381/* Opcode VEX.F2.0F 0x7f - invalid */
4382
4383
4384/* Opcode VEX.0F 0x80 - invalid */
4385/* Opcode VEX.0F 0x81 - invalid */
4386/* Opcode VEX.0F 0x82 - invalid */
4387/* Opcode VEX.0F 0x83 - invalid */
4388/* Opcode VEX.0F 0x84 - invalid */
4389/* Opcode VEX.0F 0x85 - invalid */
4390/* Opcode VEX.0F 0x86 - invalid */
4391/* Opcode VEX.0F 0x87 - invalid */
4392/* Opcode VEX.0F 0x88 - invalid */
4393/* Opcode VEX.0F 0x89 - invalid */
4394/* Opcode VEX.0F 0x8a - invalid */
4395/* Opcode VEX.0F 0x8b - invalid */
4396/* Opcode VEX.0F 0x8c - invalid */
4397/* Opcode VEX.0F 0x8d - invalid */
4398/* Opcode VEX.0F 0x8e - invalid */
4399/* Opcode VEX.0F 0x8f - invalid */
4400/* Opcode VEX.0F 0x90 - invalid */
4401/* Opcode VEX.0F 0x91 - invalid */
4402/* Opcode VEX.0F 0x92 - invalid */
4403/* Opcode VEX.0F 0x93 - invalid */
4404/* Opcode VEX.0F 0x94 - invalid */
4405/* Opcode VEX.0F 0x95 - invalid */
4406/* Opcode VEX.0F 0x96 - invalid */
4407/* Opcode VEX.0F 0x97 - invalid */
4408/* Opcode VEX.0F 0x98 - invalid */
4409/* Opcode VEX.0F 0x99 - invalid */
4410/* Opcode VEX.0F 0x9a - invalid */
4411/* Opcode VEX.0F 0x9b - invalid */
4412/* Opcode VEX.0F 0x9c - invalid */
4413/* Opcode VEX.0F 0x9d - invalid */
4414/* Opcode VEX.0F 0x9e - invalid */
4415/* Opcode VEX.0F 0x9f - invalid */
4416/* Opcode VEX.0F 0xa0 - invalid */
4417/* Opcode VEX.0F 0xa1 - invalid */
4418/* Opcode VEX.0F 0xa2 - invalid */
4419/* Opcode VEX.0F 0xa3 - invalid */
4420/* Opcode VEX.0F 0xa4 - invalid */
4421/* Opcode VEX.0F 0xa5 - invalid */
4422/* Opcode VEX.0F 0xa6 - invalid */
4423/* Opcode VEX.0F 0xa7 - invalid */
4424/* Opcode VEX.0F 0xa8 - invalid */
4425/* Opcode VEX.0F 0xa9 - invalid */
4426/* Opcode VEX.0F 0xaa - invalid */
4427/* Opcode VEX.0F 0xab - invalid */
4428/* Opcode VEX.0F 0xac - invalid */
4429/* Opcode VEX.0F 0xad - invalid */
4430
4431
4432/* Opcode VEX.0F 0xae mem/0 - invalid. */
4433/* Opcode VEX.0F 0xae mem/1 - invalid. */
4434
4435/**
4436 * @ opmaps grp15
4437 * @ opcode !11/2
4438 * @ oppfx none
4439 * @ opcpuid sse
4440 * @ opgroup og_sse_mxcsrsm
4441 * @ opxcpttype 5
4442 * @ optest op1=0 -> mxcsr=0
4443 * @ optest op1=0x2083 -> mxcsr=0x2083
4444 * @ optest op1=0xfffffffe -> value.xcpt=0xd
4445 * @ optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
4446 * @ optest op1=0x2083 cr0|=em -> value.xcpt=0x6
4447 * @ optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
4448 * @ optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
4449 * @ optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
4450 * @ optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
4451 * @ optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
4452 * @ optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
4453 */
4454FNIEMOP_STUB_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm);
4455//FNIEMOP_DEF_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm)
4456//{
4457// IEMOP_MNEMONIC1(M_MEM, VLDMXCSR, vldmxcsr, MdRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
4458// IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4459// IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
4460// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
4461// IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4462// IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4463// IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
4464// IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_ldmxcsr, iEffSeg, GCPtrEff);
4465// IEM_MC_END();
4467//}
4468
4469
4470/**
4471 * @opmaps vexgrp15
4472 * @opcode !11/3
4473 * @oppfx none
4474 * @opcpuid avx
4475 * @opgroup og_avx_mxcsrsm
4476 * @opxcpttype 5
4477 * @optest mxcsr=0 -> op1=0
4478 * @optest mxcsr=0x2083 -> op1=0x2083
4479 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
4480 * @optest !amd / mxcsr=0x2085 cr0|=em -> op1=0x2085
4481 * @optest amd / mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
4482 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
4483 * @optest mxcsr=0x2087 cr4&~=osfxsr -> op1=0x2087
4484 * @optest mxcsr=0x208f cr4&~=osxsave -> value.xcpt=0x6
4485 * @optest mxcsr=0x2087 cr4&~=osfxsr,osxsave -> value.xcpt=0x6
4486 * @optest !amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x7
4487 * @optest amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
4488 * @optest !amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> op1=0x2089
4489 * @optest amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
4490 * @optest !amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x7
4491 * @optest amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
4492 * @optest !amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x7
4493 * @optest amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
4494 * @optest !amd / mxcsr=0x208c xcr0&~=all_avx -> value.xcpt=0x6
4495 * @optest amd / mxcsr=0x208c xcr0&~=all_avx -> op1=0x208c
4496 * @optest !amd / mxcsr=0x208d xcr0&~=all_avx_sse -> value.xcpt=0x6
4497 * @optest amd / mxcsr=0x208d xcr0&~=all_avx_sse -> op1=0x208d
4498 * @optest !amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x6
4499 * @optest amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x7
4500 * @optest mxcsr=0x2082 cr0|=ts cr4&~=osxsave -> value.xcpt=0x6
4501 * @optest mxcsr=0x2081 xcr0&~=all_avx cr0|=ts cr4&~=osxsave
4502 * -> value.xcpt=0x6
4503 * @remarks AMD Jaguar CPU (f0x16,m0,s1) \#UD when CR0.EM is set. It also
4504 * doesn't seem to check XCR0[2:1] != 11b. This does not match the
4505 * APMv4 rev 3.17 page 509.
4506 * @todo Test this instruction on AMD Ryzen.
4507 */
4508FNIEMOP_DEF_1(iemOp_VGrp15_vstmxcsr, uint8_t, bRm)
4509{
4510 IEMOP_MNEMONIC1(VEX_M_MEM, VSTMXCSR, vstmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
4511 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4512 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
4513 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
4514 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4515 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
4516 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
4517 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_vstmxcsr, iEffSeg, GCPtrEff);
4518 IEM_MC_END();
4519}
4520
4521/* Opcode VEX.0F 0xae mem/4 - invalid. */
4522/* Opcode VEX.0F 0xae mem/5 - invalid. */
4523/* Opcode VEX.0F 0xae mem/6 - invalid. */
4524/* Opcode VEX.0F 0xae mem/7 - invalid. */
4525
4526/* Opcode VEX.0F 0xae 11b/0 - invalid. */
4527/* Opcode VEX.0F 0xae 11b/1 - invalid. */
4528/* Opcode VEX.0F 0xae 11b/2 - invalid. */
4529/* Opcode VEX.0F 0xae 11b/3 - invalid. */
4530/* Opcode VEX.0F 0xae 11b/4 - invalid. */
4531/* Opcode VEX.0F 0xae 11b/5 - invalid. */
4532/* Opcode VEX.0F 0xae 11b/6 - invalid. */
4533/* Opcode VEX.0F 0xae 11b/7 - invalid. */
4534
4535/**
4536 * VEX Group 15 jump table for memory variant.
4537 */
4538IEM_STATIC const PFNIEMOPRM g_apfnVexGroup15MemReg[] =
4539{ /* pfx: none, 066h, 0f3h, 0f2h */
4540 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4541 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4542 /* /2 */ iemOp_VGrp15_vldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4543 /* /3 */ iemOp_VGrp15_vstmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4544 /* /4 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4545 /* /5 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4546 /* /6 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4547 /* /7 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
4548};
4549AssertCompile(RT_ELEMENTS(g_apfnVexGroup15MemReg) == 8*4);
4550
4551
4552/** Opcode VEX.0F 0xae. */
4553FNIEMOP_DEF(iemOp_VGrp15)
4554{
4555 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4556 if (IEM_IS_MODRM_REG_MODE(bRm))
4557 /* register, register */
4558 return FNIEMOP_CALL_1(iemOp_InvalidWithRM, bRm);
4559
4560 /* memory, register */
4561 return FNIEMOP_CALL_1(g_apfnVexGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
4562 + pVCpu->iem.s.idxPrefix], bRm);
4563}
4564
4565
4566/* Opcode VEX.0F 0xaf - invalid. */
4567
4568/* Opcode VEX.0F 0xb0 - invalid. */
4569/* Opcode VEX.0F 0xb1 - invalid. */
4570/* Opcode VEX.0F 0xb2 - invalid. */
4572/* Opcode VEX.0F 0xb3 - invalid. */
4573/* Opcode VEX.0F 0xb4 - invalid. */
4574/* Opcode VEX.0F 0xb5 - invalid. */
4575/* Opcode VEX.0F 0xb6 - invalid. */
4576/* Opcode VEX.0F 0xb7 - invalid. */
4577/* Opcode VEX.0F 0xb8 - invalid. */
4578/* Opcode VEX.0F 0xb9 - invalid. */
4579/* Opcode VEX.0F 0xba - invalid. */
4580/* Opcode VEX.0F 0xbb - invalid. */
4581/* Opcode VEX.0F 0xbc - invalid. */
4582/* Opcode VEX.0F 0xbd - invalid. */
4583/* Opcode VEX.0F 0xbe - invalid. */
4584/* Opcode VEX.0F 0xbf - invalid. */
4585
4586/* Opcode VEX.0F 0xc0 - invalid. */
4587/* Opcode VEX.66.0F 0xc0 - invalid. */
4588/* Opcode VEX.F3.0F 0xc0 - invalid. */
4589/* Opcode VEX.F2.0F 0xc0 - invalid. */
4590
4591/* Opcode VEX.0F 0xc1 - invalid. */
4592/* Opcode VEX.66.0F 0xc1 - invalid. */
4593/* Opcode VEX.F3.0F 0xc1 - invalid. */
4594/* Opcode VEX.F2.0F 0xc1 - invalid. */
4595
4596/** Opcode VEX.0F 0xc2 - vcmpps Vps,Hps,Wps,Ib */
4597FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
4598/** Opcode VEX.66.0F 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
4599FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
4600/** Opcode VEX.F3.0F 0xc2 - vcmpss Vss,Hss,Wss,Ib */
4601FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
4602/** Opcode VEX.F2.0F 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
4603FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);
4604
4605/* Opcode VEX.0F 0xc3 - invalid */
4606/* Opcode VEX.66.0F 0xc3 - invalid */
4607/* Opcode VEX.F3.0F 0xc3 - invalid */
4608/* Opcode VEX.F2.0F 0xc3 - invalid */
4609
4610/* Opcode VEX.0F 0xc4 - invalid */
4611
4612
4613/** Opcode VEX.66.0F 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
4614FNIEMOP_DEF(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib)
4615{
4616 /*IEMOP_MNEMONIC4(VEX_RMV, VPINSRW, vpinsrw, Vdq, Vdq, Ey, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);*/ /** @todo */
4617 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4618 if (IEM_IS_MODRM_REG_MODE(bRm))
4619 {
4620 /*
4621 * Register, register.
4622 */
4623 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4624 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4625 IEM_MC_LOCAL(RTUINT128U, uSrc1);
4626 IEM_MC_LOCAL(uint16_t, uValue);
4627
4628 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
4629 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4630 IEM_MC_PREPARE_AVX_USAGE();
4631
4632 IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
4633 IEM_MC_FETCH_GREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm));
4634 IEM_MC_STORE_XREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
4635 IEM_MC_STORE_XREG_U16( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7, uValue);
4636 IEM_MC_ADVANCE_RIP_AND_FINISH();
4637 IEM_MC_END();
4638 }
4639 else
4640 {
4641 /*
4642 * Register, memory.
4643 */
4644 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4645 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4646 IEM_MC_LOCAL(RTUINT128U, uSrc1);
4647 IEM_MC_LOCAL(uint16_t, uValue);
4648
4649 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
4650 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4651 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
4652 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4653 IEM_MC_PREPARE_AVX_USAGE();
4654
4655 IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
4656 IEM_MC_FETCH_MEM_U16(uValue, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4657 IEM_MC_STORE_XREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
4658 IEM_MC_STORE_XREG_U16( IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7, uValue);
4659 IEM_MC_ADVANCE_RIP_AND_FINISH();
4660 IEM_MC_END();
4661 }
4662}
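/*
 * For reference: the copy-then-patch sequence above (STORE_XREG_U128 followed
 * by STORE_XREG_U16) matches the instruction semantics - all of Hx is copied
 * and one word, selected by imm8 & 7, is replaced. Sketch with a hypothetical
 * helper name.
 */
#if 0 /* illustrative sketch only */
static void pinsrwRefSketch(uint16_t auDst[8], const uint16_t auSrc1[8], uint16_t uValue, uint8_t bImm)
{
    for (unsigned i = 0; i < 8; i++)
        auDst[i] = auSrc1[i];      /* destination starts as a copy of Hx */
    auDst[bImm & 7] = uValue;      /* the selected word comes from Ry/Mw */
}
#endif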
4663
4664
4665/* Opcode VEX.F3.0F 0xc4 - invalid */
4666/* Opcode VEX.F2.0F 0xc4 - invalid */
4667
4668/* Opcode VEX.0F 0xc5 - invalid */
4669
4670
4671/** Opcode VEX.66.0F 0xc5 - vpextrw Gd, Udq, Ib */
4672FNIEMOP_DEF(iemOp_vpextrw_Gd_Udq_Ib)
4673{
4674 IEMOP_MNEMONIC3(VEX_RMI_REG, VPEXTRW, vpextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
4675 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4676 if (IEM_IS_MODRM_REG_MODE(bRm))
4677 {
4678 /*
4679 * greg32, XMM, imm8.
4680 */
4681 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4682 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4683 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
4684 IEM_MC_LOCAL(uint16_t, uValue);
4685 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4686 IEM_MC_PREPARE_AVX_USAGE();
4687 IEM_MC_FETCH_XREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm), bImm & 7);
4688 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uValue);
4689 IEM_MC_ADVANCE_RIP_AND_FINISH();
4690 IEM_MC_END();
4691 }
4692 /* No memory operand. */
4693 else
4694 IEMOP_RAISE_INVALID_OPCODE_RET();
4695}
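/*
 * For reference: the extraction mirror image - the word selected by imm8 & 7
 * is zero extended into the 32-bit general register. Hypothetical helper name.
 */
#if 0 /* illustrative sketch only */
static uint32_t pextrwRefSketch(const uint16_t auSrc[8], uint8_t bImm)
{
    return auSrc[bImm & 7]; /* implicit zero extension to 32 bits */
}
#endif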
4696
4697
4698/* Opcode VEX.F3.0F 0xc5 - invalid */
4699/* Opcode VEX.F2.0F 0xc5 - invalid */
4700
4701
4702#define VSHUFP_X(a_Instr) \
4703 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
4704 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4705 { \
4706 /* \
4707 * Register, register. \
4708 */ \
4709 if (pVCpu->iem.s.uVexLength) \
4710 { \
4711 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
4712 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
4713 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2); \
4714 IEM_MC_LOCAL(RTUINT256U, uDst); \
4715 IEM_MC_LOCAL(RTUINT256U, uSrc1); \
4716 IEM_MC_LOCAL(RTUINT256U, uSrc2); \
4717 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
4718 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1); \
4719 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2); \
4720 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
4721 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
4722 IEM_MC_PREPARE_AVX_USAGE(); \
4723 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4724 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4725 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
4726 iemAImpl_ ## a_Instr ## _u256_fallback), puDst, puSrc1, puSrc2, bImmArg); \
4727 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
4728 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4729 IEM_MC_END(); \
4730 } \
4731 else \
4732 { \
4733 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
4734 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
4735 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
4736 IEM_MC_ARG(PRTUINT128U, puDst, 0); \
4737 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1); \
4738 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2); \
4739 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
4740 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
4741 IEM_MC_PREPARE_AVX_USAGE(); \
4742 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
4743 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4744 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4745 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
4746 iemAImpl_ ## a_Instr ## _u128_fallback), puDst, puSrc1, puSrc2, bImmArg); \
4747 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
4748 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4749 IEM_MC_END(); \
4750 } \
4751 } \
4752 else \
4753 { \
4754 /* \
4755 * Register, memory. \
4756 */ \
4757 if (pVCpu->iem.s.uVexLength) \
4758 { \
4759 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
4760 IEM_MC_LOCAL(RTUINT256U, uDst); \
4761 IEM_MC_LOCAL(RTUINT256U, uSrc1); \
4762 IEM_MC_LOCAL(RTUINT256U, uSrc2); \
4763 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4764 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
4765 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1); \
4766 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2); \
4767 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
4768 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
4769 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
4770 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2); \
4771 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
4772 IEM_MC_PREPARE_AVX_USAGE(); \
4773 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4774 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4775 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
4776 iemAImpl_ ## a_Instr ## _u256_fallback), puDst, puSrc1, puSrc2, bImmArg); \
4777 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
4778 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4779 IEM_MC_END(); \
4780 } \
4781 else \
4782 { \
4783 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
4784 IEM_MC_LOCAL(RTUINT128U, uSrc2); \
4785 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
4786 IEM_MC_ARG(PRTUINT128U, puDst, 0); \
4787 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1); \
4788 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2); \
4789 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
4790 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
4791 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
4792 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
4793 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
4794 IEM_MC_PREPARE_AVX_USAGE(); \
4795 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
4796 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
4797 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
4798 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
4799 iemAImpl_ ## a_Instr ## _u128_fallback), puDst, puSrc1, puSrc2, bImmArg); \
4800 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
4801 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4802 IEM_MC_END(); \
4803 } \
4804 } \
4805 (void)0
4806
4807/** Opcode VEX.0F 0xc6 - vshufps Vps,Hps,Wps,Ib */
4808FNIEMOP_DEF(iemOp_vshufps_Vps_Hps_Wps_Ib)
4809{
4810 IEMOP_MNEMONIC4(VEX_RMI, VSHUFPS, vshufps, Vps, Hps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_SKIP_PYTHON); /** @todo */
4811 VSHUFP_X(vshufps);
4812}
4813
4814
4815/** Opcode VEX.66.0F 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
4816FNIEMOP_DEF(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib)
4817{
4818 IEMOP_MNEMONIC4(VEX_RMI, VSHUFPD, vshufpd, Vpd, Hpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_SKIP_PYTHON); /** @todo */
4819 VSHUFP_X(vshufpd);
4820}
4821#undef VSHUFP_X
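/*
 * For reference: the element routing implemented by the vshufps workers that
 * the macro above dispatches to - the low two result elements come from the
 * first source, the high two from the second, each picked by two immediate
 * bits. Single 128-bit lane (VEX.256 repeats this per lane); the helper name
 * is hypothetical.
 */
#if 0 /* illustrative sketch only */
static void shufpsRefSketch(float afDst[4], const float afSrc1[4], const float afSrc2[4], uint8_t bImm)
{
    afDst[0] = afSrc1[(bImm >> 0) & 3];
    afDst[1] = afSrc1[(bImm >> 2) & 3];
    afDst[2] = afSrc2[(bImm >> 4) & 3];
    afDst[3] = afSrc2[(bImm >> 6) & 3];
}
#endif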
4822
4823
4824/* Opcode VEX.F3.0F 0xc6 - invalid */
4825/* Opcode VEX.F2.0F 0xc6 - invalid */
4826
4827/* Opcode VEX.0F 0xc7 - invalid */
4828/* Opcode VEX.66.0F 0xc7 - invalid */
4829/* Opcode VEX.F3.0F 0xc7 - invalid */
4830/* Opcode VEX.F2.0F 0xc7 - invalid */
4831
4832/* Opcode VEX.0F 0xc8 - invalid */
4833/* Opcode VEX.0F 0xc9 - invalid */
4834/* Opcode VEX.0F 0xca - invalid */
4835/* Opcode VEX.0F 0xcb - invalid */
4836/* Opcode VEX.0F 0xcc - invalid */
4837/* Opcode VEX.0F 0xcd - invalid */
4838/* Opcode VEX.0F 0xce - invalid */
4839/* Opcode VEX.0F 0xcf - invalid */
4840
4841
4842/* Opcode VEX.0F 0xd0 - invalid */
4843/** Opcode VEX.66.0F 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
4844FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd);
4845/* Opcode VEX.F3.0F 0xd0 - invalid */
4846/** Opcode VEX.F2.0F 0xd0 - vaddsubps Vps, Hps, Wps */
4847FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps);
4848
4849/* Opcode VEX.0F 0xd1 - invalid */
4850/** Opcode VEX.66.0F 0xd1 - vpsrlw Vx, Hx, Wx */
4851FNIEMOP_DEF(iemOp_vpsrlw_Vx_Hx_W)
4852{
4853 IEMOP_MNEMONIC3(VEX_RVM, VPSRLW, vpsrlw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4854 IEMOPMEDIAOPTF3_INIT_VARS(vpsrlw);
4855 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4856}
4857
4858/* Opcode VEX.F3.0F 0xd1 - invalid */
4859/* Opcode VEX.F2.0F 0xd1 - invalid */
4860
4861/* Opcode VEX.0F 0xd2 - invalid */
4862/** Opcode VEX.66.0F 0xd2 - vpsrld Vx, Hx, Wx */
4863FNIEMOP_DEF(iemOp_vpsrld_Vx_Hx_Wx)
4864{
4865 IEMOP_MNEMONIC3(VEX_RVM, VPSRLD, vpsrld, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4866 IEMOPMEDIAOPTF3_INIT_VARS(vpsrld);
4867 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4868}
4869
4870/* Opcode VEX.F3.0F 0xd2 - invalid */
4871/* Opcode VEX.F2.0F 0xd2 - invalid */
4872
4873/* Opcode VEX.0F 0xd3 - invalid */
4874/** Opcode VEX.66.0F 0xd3 - vpsrlq Vx, Hx, Wx */
4875FNIEMOP_DEF(iemOp_vpsrlq_Vx_Hx_Wx)
4876{
4877 IEMOP_MNEMONIC3(VEX_RVM, VPSRLQ, vpsrlq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4878 IEMOPMEDIAOPTF3_INIT_VARS(vpsrlq);
4879 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4880}
4881
4882/* Opcode VEX.F3.0F 0xd3 - invalid */
4883/* Opcode VEX.F2.0F 0xd3 - invalid */
4884
4885/* Opcode VEX.0F 0xd4 - invalid */
4886
4887
4888/** Opcode VEX.66.0F 0xd4 - vpaddq Vx, Hx, Wx */
4889FNIEMOP_DEF(iemOp_vpaddq_Vx_Hx_Wx)
4890{
4891 IEMOP_MNEMONIC3(VEX_RVM, VPADDQ, vpaddq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4892 IEMOPMEDIAOPTF3_INIT_VARS( vpaddq);
4893 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4894}
4895
4896
4897/* Opcode VEX.F3.0F 0xd4 - invalid */
4898/* Opcode VEX.F2.0F 0xd4 - invalid */
4899
4900/* Opcode VEX.0F 0xd5 - invalid */
4901
4902
4903/** Opcode VEX.66.0F 0xd5 - vpmullw Vx, Hx, Wx */
4904FNIEMOP_DEF(iemOp_vpmullw_Vx_Hx_Wx)
4905{
4906 IEMOP_MNEMONIC3(VEX_RVM, VPMULLW, vpmullw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4907 IEMOPMEDIAOPTF3_INIT_VARS(vpmullw);
4908 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4909}
4910
4911
4912/* Opcode VEX.F3.0F 0xd5 - invalid */
4913/* Opcode VEX.F2.0F 0xd5 - invalid */
4914
4915/* Opcode VEX.0F 0xd6 - invalid */
4916
4917/**
4918 * @opcode 0xd6
4919 * @oppfx 0x66
4920 * @opcpuid avx
4921 * @opgroup og_avx_pcksclr_datamove
4922 * @opxcpttype none
4923 * @optest op1=-1 op2=2 -> op1=2
4924 * @optest op1=0 op2=-42 -> op1=-42
4925 */
4926FNIEMOP_DEF(iemOp_vmovq_Wq_Vq)
4927{
4928 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Wq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
4929 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4930 if (IEM_IS_MODRM_REG_MODE(bRm))
4931 {
4932 /*
4933 * Register, register.
4934 */
4935 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4936 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4937
4938 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4939 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4940
4941 IEM_MC_COPY_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
4942 IEM_GET_MODRM_REG(pVCpu, bRm));
4943 IEM_MC_ADVANCE_RIP_AND_FINISH();
4944 IEM_MC_END();
4945 }
4946 else
4947 {
4948 /*
4949 * Memory, register.
4950 */
4951 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4952 IEM_MC_LOCAL(uint64_t, uSrc);
4953 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4954
4955 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4956 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4957 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4958 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
4959
4960 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
4961 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
4962
4963 IEM_MC_ADVANCE_RIP_AND_FINISH();
4964 IEM_MC_END();
4965 }
4966}
4967
4968/* Opcode VEX.F3.0F 0xd6 - invalid */
4969/* Opcode VEX.F2.0F 0xd6 - invalid */
4970
4971
4972/* Opcode VEX.0F 0xd7 - invalid */
4973
4974/** Opcode VEX.66.0F 0xd7 - vpmovmskb Gd, Ux */
4975FNIEMOP_DEF(iemOp_vpmovmskb_Gd_Ux)
4976{
4977 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4978    /* Docs say register only. */
4979 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
4980 {
4981 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
4982 IEMOP_MNEMONIC2(VEX_RM_REG, VPMOVMSKB, vpmovmskb, Gd, Ux, DISOPTYPE_X86_SSE | DISOPTYPE_HARMLESS, 0);
4983 if (pVCpu->iem.s.uVexLength)
4984 {
4985 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4986 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
4987 IEM_MC_ARG(uint64_t *, puDst, 0);
4988 IEM_MC_LOCAL(RTUINT256U, uSrc);
4989 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
4990 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4991 IEM_MC_PREPARE_AVX_USAGE();
4992 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4993 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4994 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpmovmskb_u256,
4995 iemAImpl_vpmovmskb_u256_fallback), puDst, puSrc);
4996 IEM_MC_ADVANCE_RIP_AND_FINISH();
4997 IEM_MC_END();
4998 }
4999 else
5000 {
5001 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5002 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5003 IEM_MC_ARG(uint64_t *, puDst, 0);
5004 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
5005 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5006 IEM_MC_PREPARE_AVX_USAGE();
5007 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
5008 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
5009 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
5010 IEM_MC_ADVANCE_RIP_AND_FINISH();
5011 IEM_MC_END();
5012 }
5013 }
5014 else
5015 IEMOP_RAISE_INVALID_OPCODE_RET();
5016}
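
/* A rough reference model (not part of IEM, compiled out) of what the 128-bit
   worker above computes: one mask bit per source byte, taken from that byte's
   MSB, with the upper GREG bits cleared.  Name and shape are illustrative. */
#if 0 /* illustrative only, not built */
static uint64_t sketchPMovMskB128(PCRTUINT128U puSrc)
{
    uint64_t fMask = 0;
    for (unsigned iByte = 0; iByte < 16; iByte++)
        fMask |= (uint64_t)(puSrc->au8[iByte] >> 7) << iByte; /* MSB of each byte */
    return fMask; /* bits 16..63 stay zero */
}
#endif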
5017
5018
5019/* Opcode VEX.F3.0F 0xd7 - invalid */
5020/* Opcode VEX.F2.0F 0xd7 - invalid */
5021
5022
5023/* Opcode VEX.0F 0xd8 - invalid */
5024
5025/** Opcode VEX.66.0F 0xd8 - vpsubusb Vx, Hx, Wx */
5026FNIEMOP_DEF(iemOp_vpsubusb_Vx_Hx_Wx)
5027{
5028 IEMOP_MNEMONIC3(VEX_RVM, VPSUBUSB, vpsubusb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5029 IEMOPMEDIAOPTF3_INIT_VARS(vpsubusb);
5030 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5031}
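
/* Per-element sketch (illustrative only, compiled out) of the unsigned
   saturating subtract family dispatched here and at 0xd9: results below zero
   clamp to zero instead of wrapping. */
#if 0 /* illustrative only, not built */
static uint8_t sketchSubUsB(uint8_t uLeft, uint8_t uRight)
{
    return uLeft > uRight ? (uint8_t)(uLeft - uRight) : 0; /* clamp at 0 */
}
#endif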
5032
5033
5034/* Opcode VEX.F3.0F 0xd8 - invalid */
5035/* Opcode VEX.F2.0F 0xd8 - invalid */
5036
5037/* Opcode VEX.0F 0xd9 - invalid */
5038
5039
5040/** Opcode VEX.66.0F 0xd9 - vpsubusw Vx, Hx, Wx */
5041FNIEMOP_DEF(iemOp_vpsubusw_Vx_Hx_Wx)
5042{
5043 IEMOP_MNEMONIC3(VEX_RVM, VPSUBUSW, vpsubusw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5044 IEMOPMEDIAOPTF3_INIT_VARS(vpsubusw);
5045 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5046}
5047
5048
5049/* Opcode VEX.F3.0F 0xd9 - invalid */
5050/* Opcode VEX.F2.0F 0xd9 - invalid */
5051
5052/* Opcode VEX.0F 0xda - invalid */
5053
5054
5055/** Opcode VEX.66.0F 0xda - vpminub Vx, Hx, Wx */
5056FNIEMOP_DEF(iemOp_vpminub_Vx_Hx_Wx)
5057{
5058 IEMOP_MNEMONIC3(VEX_RVM, VPMINUB, vpminub, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5059 IEMOPMEDIAOPTF3_INIT_VARS(vpminub);
5060 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5061}
5062
5063
5064/* Opcode VEX.F3.0F 0xda - invalid */
5065/* Opcode VEX.F2.0F 0xda - invalid */
5066
5067/* Opcode VEX.0F 0xdb - invalid */
5068
5069
5070/** Opcode VEX.66.0F 0xdb - vpand Vx, Hx, Wx */
5071FNIEMOP_DEF(iemOp_vpand_Vx_Hx_Wx)
5072{
5073 IEMOP_MNEMONIC3(VEX_RVM, VPAND, vpand, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5074 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
5075 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
5076}
5077
5078
5079/* Opcode VEX.F3.0F 0xdb - invalid */
5080/* Opcode VEX.F2.0F 0xdb - invalid */
5081
5082/* Opcode VEX.0F 0xdc - invalid */
5083
5084
5085/** Opcode VEX.66.0F 0xdc - vpaddusb Vx, Hx, Wx */
5086FNIEMOP_DEF(iemOp_vpaddusb_Vx_Hx_Wx)
5087{
5088 IEMOP_MNEMONIC3(VEX_RVM, VPADDUSB, vpaddusb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5089 IEMOPMEDIAOPTF3_INIT_VARS(vpaddusb);
5090 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5091}
5092
5093
5094/* Opcode VEX.F3.0F 0xdc - invalid */
5095/* Opcode VEX.F2.0F 0xdc - invalid */
5096
5097/* Opcode VEX.0F 0xdd - invalid */
5098
5099
5100/** Opcode VEX.66.0F 0xdd - vpaddusw Vx, Hx, Wx */
5101FNIEMOP_DEF(iemOp_vpaddusw_Vx_Hx_Wx)
5102{
5103 IEMOP_MNEMONIC3(VEX_RVM, VPADDUSW, vpaddusw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5104 IEMOPMEDIAOPTF3_INIT_VARS(vpaddusw);
5105 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5106}
5107
5108
5109/* Opcode VEX.F3.0F 0xdd - invalid */
5110/* Opcode VEX.F2.0F 0xdd - invalid */
5111
5112/* Opcode VEX.0F 0xde - invalid */
5113
5114
5115/** Opcode VEX.66.0F 0xde - vpmaxub Vx, Hx, Wx */
5116FNIEMOP_DEF(iemOp_vpmaxub_Vx_Hx_Wx)
5117{
5118 IEMOP_MNEMONIC3(VEX_RVM, VPMAXUB, vpmaxub, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5119 IEMOPMEDIAOPTF3_INIT_VARS(vpmaxub);
5120 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5121}
5122
5123
5124/* Opcode VEX.F3.0F 0xde - invalid */
5125/* Opcode VEX.F2.0F 0xde - invalid */
5126
5127/* Opcode VEX.0F 0xdf - invalid */
5128
5129
5130/** Opcode VEX.66.0F 0xdf - vpandn Vx, Hx, Wx */
5131FNIEMOP_DEF(iemOp_vpandn_Vx_Hx_Wx)
5132{
5133 IEMOP_MNEMONIC3(VEX_RVM, VPANDN, vpandn, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5134 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
5135 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
5136}
5137
5138
5139/* Opcode VEX.F3.0F 0xdf - invalid */
5140/* Opcode VEX.F2.0F 0xdf - invalid */
5141
5142/* Opcode VEX.0F 0xe0 - invalid */
5143
5144
5145/** Opcode VEX.66.0F 0xe0 - vpavgb Vx, Hx, Wx */
5146FNIEMOP_DEF(iemOp_vpavgb_Vx_Hx_Wx)
5147{
5148 IEMOP_MNEMONIC3(VEX_RVM, VPAVGB, vpavgb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5149 IEMOPMEDIAOPTF3_INIT_VARS(vpavgb);
5150 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5151}
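
/* Illustrative per-byte model (compiled out) of the rounding average computed
   by vpavgb; vpavgw at 0xe3 is the same with 16-bit elements.  The sum is
   formed at full precision, one is added, then it is halved. */
#if 0 /* illustrative only, not built */
static uint8_t sketchAvgB(uint8_t uLeft, uint8_t uRight)
{
    return (uint8_t)(((unsigned)uLeft + uRight + 1) >> 1); /* round-half-up */
}
#endif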
5152
5153
5154/* Opcode VEX.F3.0F 0xe0 - invalid */
5155/* Opcode VEX.F2.0F 0xe0 - invalid */
5156
5157/* Opcode VEX.0F 0xe1 - invalid */
5158/** Opcode VEX.66.0F 0xe1 - vpsraw Vx, Hx, Wx */
5159FNIEMOP_DEF(iemOp_vpsraw_Vx_Hx_W)
5160{
5161 IEMOP_MNEMONIC3(VEX_RVM, VPSRAW, vpsraw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5162 IEMOPMEDIAOPTF3_INIT_VARS(vpsraw);
5163 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5164}
5165
5166/* Opcode VEX.F3.0F 0xe1 - invalid */
5167/* Opcode VEX.F2.0F 0xe1 - invalid */
5168
5169/* Opcode VEX.0F 0xe2 - invalid */
5170/** Opcode VEX.66.0F 0xe2 - vpsrad Vx, Hx, Wx */
5171FNIEMOP_DEF(iemOp_vpsrad_Vx_Hx_Wx)
5172{
5173 IEMOP_MNEMONIC3(VEX_RVM, VPSRAD, vpsrad, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5174 IEMOPMEDIAOPTF3_INIT_VARS(vpsrad);
5175 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5176}
5177
5178/* Opcode VEX.F3.0F 0xe2 - invalid */
5179/* Opcode VEX.F2.0F 0xe2 - invalid */
5180
5181/* Opcode VEX.0F 0xe3 - invalid */
5182
5183
5184/** Opcode VEX.66.0F 0xe3 - vpavgw Vx, Hx, Wx */
5185FNIEMOP_DEF(iemOp_vpavgw_Vx_Hx_Wx)
5186{
5187 IEMOP_MNEMONIC3(VEX_RVM, VPAVGW, vpavgw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5188 IEMOPMEDIAOPTF3_INIT_VARS(vpavgw);
5189 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5190}
5191
5192
5193/* Opcode VEX.F3.0F 0xe3 - invalid */
5194/* Opcode VEX.F2.0F 0xe3 - invalid */
5195
5196/* Opcode VEX.0F 0xe4 - invalid */
5197
5198
5199/** Opcode VEX.66.0F 0xe4 - vpmulhuw Vx, Hx, Wx */
5200FNIEMOP_DEF(iemOp_vpmulhuw_Vx_Hx_Wx)
5201{
5202 IEMOP_MNEMONIC3(VEX_RVM, VPMULHUW, vpmulhuw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5203 IEMOPMEDIAOPTF3_INIT_VARS(vpmulhuw);
5204 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5205}
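
/* Illustrative per-word model (compiled out) of vpmulhuw: a full 16x16->32
   bit unsigned multiply of which only the high 16 bits are kept; vpmulhw at
   0xe5 is the signed variant. */
#if 0 /* illustrative only, not built */
static uint16_t sketchMulHuW(uint16_t uLeft, uint16_t uRight)
{
    return (uint16_t)(((uint32_t)uLeft * uRight) >> 16); /* high half only */
}
#endif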
5206
5207
5208/* Opcode VEX.F3.0F 0xe4 - invalid */
5209/* Opcode VEX.F2.0F 0xe4 - invalid */
5210
5211/* Opcode VEX.0F 0xe5 - invalid */
5212
5213
5214/** Opcode VEX.66.0F 0xe5 - vpmulhw Vx, Hx, Wx */
5215FNIEMOP_DEF(iemOp_vpmulhw_Vx_Hx_Wx)
5216{
5217 IEMOP_MNEMONIC3(VEX_RVM, VPMULHW, vpmulhw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5218 IEMOPMEDIAOPTF3_INIT_VARS(vpmulhw);
5219 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5220}
5221
5222
5223/* Opcode VEX.F3.0F 0xe5 - invalid */
5224/* Opcode VEX.F2.0F 0xe5 - invalid */
5225
5226/* Opcode VEX.0F 0xe6 - invalid */
5227/** Opcode VEX.66.0F 0xe6 - vcvttpd2dq Vx, Wpd */
5228FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
5229/** Opcode VEX.F3.0F 0xe6 - vcvtdq2pd Vx, Wpd */
5230FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
5231/** Opcode VEX.F2.0F 0xe6 - vcvtpd2dq Vx, Wpd */
5232FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
5233
5234
5235/* Opcode VEX.0F 0xe7 - invalid */
5236
5237/**
5238 * @opcode 0xe7
5239 * @opcodesub !11 mr/reg
5240 * @oppfx 0x66
5241 * @opcpuid avx
5242 * @opgroup og_avx_cachect
5243 * @opxcpttype 1
5244 * @optest op1=-1 op2=2 -> op1=2
5245 * @optest op1=0 op2=-42 -> op1=-42
5246 */
5247FNIEMOP_DEF(iemOp_vmovntdq_Mx_Vx)
5248{
5249 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTDQ, vmovntdq, Mx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
5250 Assert(pVCpu->iem.s.uVexLength <= 1);
5251 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5252 if (IEM_IS_MODRM_MEM_MODE(bRm))
5253 {
5254 if (pVCpu->iem.s.uVexLength == 0)
5255 {
5256 /*
5257 * 128-bit: Memory, register.
5258 */
5259 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5260 IEM_MC_LOCAL(RTUINT128U, uSrc);
5261 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5262
5263 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5264 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5265 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5266 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5267
5268 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
5269 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
5270
5271 IEM_MC_ADVANCE_RIP_AND_FINISH();
5272 IEM_MC_END();
5273 }
5274 else
5275 {
5276 /*
5277 * 256-bit: Memory, register.
5278 */
5279 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5280 IEM_MC_LOCAL(RTUINT256U, uSrc);
5281 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5282
5283 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5284 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5285 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5286 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5287
5288 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
5289 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
5290
5291 IEM_MC_ADVANCE_RIP_AND_FINISH();
5292 IEM_MC_END();
5293 }
5294 }
5295 /**
5296 * @opdone
5297 * @opmnemonic udvex660fe7reg
5298 * @opcode 0xe7
5299 * @opcodesub 11 mr/reg
5300 * @oppfx 0x66
5301 * @opunused immediate
5302 * @opcpuid avx
5303 * @optest ->
5304 */
5305 else
5306 IEMOP_RAISE_INVALID_OPCODE_RET();
5307}
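
/* Sketch (an assumption about the ALIGN_* store helpers used above; the name
   is illustrative and not part of IEM) of the alignment rule for this
   non-temporal store: the address must sit on a natural boundary, 16 bytes
   for the 128-bit form and 32 bytes for the 256-bit form, else the guest
   takes #GP(0). */
#if 0 /* illustrative only, not built */
static bool sketchIsMovntdqAddrValid(RTGCPTR GCPtr, bool fIs256Bit)
{
    RTGCPTR const fAlignMask = fIs256Bit ? 31 : 15;
    return (GCPtr & fAlignMask) == 0; /* misaligned -> #GP(0) */
}
#endif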
5308
5309/* Opcode VEX.F3.0F 0xe7 - invalid */
5310/* Opcode VEX.F2.0F 0xe7 - invalid */
5311
5312
5313/* Opcode VEX.0F 0xe8 - invalid */
5314
5315
5316/** Opcode VEX.66.0F 0xe8 - vpsubsb Vx, Hx, Wx */
5317FNIEMOP_DEF(iemOp_vpsubsb_Vx_Hx_Wx)
5318{
5319 IEMOP_MNEMONIC3(VEX_RVM, VPSUBSB, vpsubsb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5320 IEMOPMEDIAOPTF3_INIT_VARS(vpsubsb);
5321 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5322}
5323
5324
5325/* Opcode VEX.F3.0F 0xe8 - invalid */
5326/* Opcode VEX.F2.0F 0xe8 - invalid */
5327
5328/* Opcode VEX.0F 0xe9 - invalid */
5329
5330
5331/** Opcode VEX.66.0F 0xe9 - vpsubsw Vx, Hx, Wx */
5332FNIEMOP_DEF(iemOp_vpsubsw_Vx_Hx_Wx)
5333{
5334 IEMOP_MNEMONIC3(VEX_RVM, VPSUBSW, vpsubsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5335 IEMOPMEDIAOPTF3_INIT_VARS(vpsubsw);
5336 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5337}
5338
5339
5340/* Opcode VEX.F3.0F 0xe9 - invalid */
5341/* Opcode VEX.F2.0F 0xe9 - invalid */
5342
5343/* Opcode VEX.0F 0xea - invalid */
5344
5345
5346/** Opcode VEX.66.0F 0xea - vpminsw Vx, Hx, Wx */
5347FNIEMOP_DEF(iemOp_vpminsw_Vx_Hx_Wx)
5348{
5349 IEMOP_MNEMONIC3(VEX_RVM, VPMINSW, vpminsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5350 IEMOPMEDIAOPTF3_INIT_VARS(vpminsw);
5351 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5352}
5353
5354
5355/* Opcode VEX.F3.0F 0xea - invalid */
5356/* Opcode VEX.F2.0F 0xea - invalid */
5357
5358/* Opcode VEX.0F 0xeb - invalid */
5359
5360
5361/** Opcode VEX.66.0F 0xeb - vpor Vx, Hx, Wx */
5362FNIEMOP_DEF(iemOp_vpor_Vx_Hx_Wx)
5363{
5364 IEMOP_MNEMONIC3(VEX_RVM, VPOR, vpor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5365 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
5366 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
5367}
5368
5369
5370
5371/* Opcode VEX.F3.0F 0xeb - invalid */
5372/* Opcode VEX.F2.0F 0xeb - invalid */
5373
5374/* Opcode VEX.0F 0xec - invalid */
5375
5376
5377/** Opcode VEX.66.0F 0xec - vpaddsb Vx, Hx, Wx */
5378FNIEMOP_DEF(iemOp_vpaddsb_Vx_Hx_Wx)
5379{
5380 IEMOP_MNEMONIC3(VEX_RVM, VPADDSB, vpaddsb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5381 IEMOPMEDIAOPTF3_INIT_VARS(vpaddsb);
5382 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5383}
5384
5385
5386/* Opcode VEX.F3.0F 0xec - invalid */
5387/* Opcode VEX.F2.0F 0xec - invalid */
5388
5389/* Opcode VEX.0F 0xed - invalid */
5390
5391
5392/** Opcode VEX.66.0F 0xed - vpaddsw Vx, Hx, Wx */
5393FNIEMOP_DEF(iemOp_vpaddsw_Vx_Hx_Wx)
5394{
5395 IEMOP_MNEMONIC3(VEX_RVM, VPADDSW, vpaddsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5396 IEMOPMEDIAOPTF3_INIT_VARS(vpaddsw);
5397 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5398}
5399
5400
5401/* Opcode VEX.F3.0F 0xed - invalid */
5402/* Opcode VEX.F2.0F 0xed - invalid */
5403
5404/* Opcode VEX.0F 0xee - invalid */
5405
5406
5407/** Opcode VEX.66.0F 0xee - vpmaxsw Vx, Hx, Wx */
5408FNIEMOP_DEF(iemOp_vpmaxsw_Vx_Hx_Wx)
5409{
5410 IEMOP_MNEMONIC3(VEX_RVM, VPMAXSW, vpmaxsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5411 IEMOPMEDIAOPTF3_INIT_VARS(vpmaxsw);
5412 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5413}
5414
5415
5416/* Opcode VEX.F3.0F 0xee - invalid */
5417/* Opcode VEX.F2.0F 0xee - invalid */
5418
5419
5420/* Opcode VEX.0F 0xef - invalid */
5421
5422
5423/** Opcode VEX.66.0F 0xef - vpxor Vx, Hx, Wx */
5424FNIEMOP_DEF(iemOp_vpxor_Vx_Hx_Wx)
5425{
5426 IEMOP_MNEMONIC3(VEX_RVM, VPXOR, vpxor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5427 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
5428 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
5429}
5430
5431
5432/* Opcode VEX.F3.0F 0xef - invalid */
5433/* Opcode VEX.F2.0F 0xef - invalid */
5434
5435/* Opcode VEX.0F 0xf0 - invalid */
5436/* Opcode VEX.66.0F 0xf0 - invalid */
5437
5438
5439/** Opcode VEX.F2.0F 0xf0 - vlddqu Vx, Mx */
5440FNIEMOP_DEF(iemOp_vlddqu_Vx_Mx)
5441{
5442 IEMOP_MNEMONIC2(VEX_RM_MEM, VLDDQU, vlddqu, Vx_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
5443 Assert(pVCpu->iem.s.uVexLength <= 1);
5444 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5445 if (IEM_IS_MODRM_REG_MODE(bRm))
5446 {
5447 /*
5448 * Register, register - (not implemented, assuming it raises \#UD).
5449 */
5450 IEMOP_RAISE_INVALID_OPCODE_RET();
5451 }
5452 else if (pVCpu->iem.s.uVexLength == 0)
5453 {
5454 /*
5455 * Register, memory128.
5456 */
5457 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5458 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
5459 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5460
5461 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5462 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5463 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5464 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
5465
5466 IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5467 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
5468
5469 IEM_MC_ADVANCE_RIP_AND_FINISH();
5470 IEM_MC_END();
5471 }
5472 else
5473 {
5474 /*
5475 * Register, memory256.
5476 */
5477 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5478 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
5479 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5480
5481 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5482 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5483 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5484 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
5485
5486 IEM_MC_FETCH_MEM_U256_NO_AC(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5487 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
5488
5489 IEM_MC_ADVANCE_RIP_AND_FINISH();
5490 IEM_MC_END();
5491 }
5492}
5493
5494
5495/* Opcode VEX.0F 0xf1 - invalid */
5496/** Opcode VEX.66.0F 0xf1 - vpsllw Vx, Hx, Wx */
5497FNIEMOP_DEF(iemOp_vpsllw_Vx_Hx_W)
5498{
5499 IEMOP_MNEMONIC3(VEX_RVM, VPSLLW, vpsllw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5500 IEMOPMEDIAOPTF3_INIT_VARS(vpsllw);
5501 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5502}
5503
5504/* Opcode VEX.F2.0F 0xf1 - invalid */
5505
5506/* Opcode VEX.0F 0xf2 - invalid */
5507/** Opcode VEX.66.0F 0xf2 - vpslld Vx, Hx, Wx */
5508FNIEMOP_DEF(iemOp_vpslld_Vx_Hx_Wx)
5509{
5510 IEMOP_MNEMONIC3(VEX_RVM, VPSLLD, vpslld, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5511 IEMOPMEDIAOPTF3_INIT_VARS(vpslld);
5512 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5513}
5514/* Opcode VEX.F2.0F 0xf2 - invalid */
5515
5516/* Opcode VEX.0F 0xf3 - invalid */
5517/** Opcode VEX.66.0F 0xf3 - vpsllq Vx, Hx, Wx */
5518FNIEMOP_DEF(iemOp_vpsllq_Vx_Hx_Wx)
5519{
5520 IEMOP_MNEMONIC3(VEX_RVM, VPSLLQ, vpsllq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5521 IEMOPMEDIAOPTF3_INIT_VARS(vpsllq);
5522 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5523}
5524/* Opcode VEX.F2.0F 0xf3 - invalid */
5525
5526/* Opcode VEX.0F 0xf4 - invalid */
5527
5528
5529/** Opcode VEX.66.0F 0xf4 - vpmuludq Vx, Hx, Wx */
5530FNIEMOP_DEF(iemOp_vpmuludq_Vx_Hx_W)
5531{
5532 IEMOP_MNEMONIC3(VEX_RVM, VPMULUDQ, vpmuludq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5533 IEMOPMEDIAOPTF3_INIT_VARS(vpmuludq);
5534 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5535}
5536
5537
5538/* Opcode VEX.F2.0F 0xf4 - invalid */
5539
5540/* Opcode VEX.0F 0xf5 - invalid */
5541
5542
5543/** Opcode VEX.66.0F 0xf5 - vpmaddwd Vx, Hx, Wx */
5544FNIEMOP_DEF(iemOp_vpmaddwd_Vx_Hx_Wx)
5545{
5546 IEMOP_MNEMONIC3(VEX_RVM, VPMADDWD, vpmaddwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5547 IEMOPMEDIAOPTF3_INIT_VARS(vpmaddwd);
5548 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5549}
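
/* Illustrative model (compiled out) of one destination dword of vpmaddwd:
   two adjacent signed word products are summed into a signed 32-bit result. */
#if 0 /* illustrative only, not built */
static int32_t sketchMAddWd(int16_t i16Src1Lo, int16_t i16Src2Lo,
                            int16_t i16Src1Hi, int16_t i16Src2Hi)
{
    return (int32_t)i16Src1Lo * i16Src2Lo + (int32_t)i16Src1Hi * i16Src2Hi;
}
#endif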
5550
5551
5552/* Opcode VEX.F2.0F 0xf5 - invalid */
5553
5554/* Opcode VEX.0F 0xf6 - invalid */
5555
5556
5557/** Opcode VEX.66.0F 0xf6 - vpsadbw Vx, Hx, Wx */
5558FNIEMOP_DEF(iemOp_vpsadbw_Vx_Hx_Wx)
5559{
5560 IEMOP_MNEMONIC3(VEX_RVM, VPSADBW, vpsadbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5561 IEMOPMEDIAOPTF3_INIT_VARS(vpsadbw);
5562 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5563}
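
/* Illustrative model (compiled out) of one vpsadbw result: the sum of
   absolute byte differences over an 8-byte group, which lands zero extended
   in the low word of the corresponding destination qword. */
#if 0 /* illustrative only, not built */
static uint16_t sketchSadBw(uint8_t const *pabSrc1, uint8_t const *pabSrc2)
{
    unsigned uSum = 0;
    for (unsigned i = 0; i < 8; i++)
        uSum += pabSrc1[i] > pabSrc2[i] ? pabSrc1[i] - pabSrc2[i]
                                        : pabSrc2[i] - pabSrc1[i];
    return (uint16_t)uSum; /* max 8 * 255 = 2040, always fits */
}
#endif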
5564
5565
5566/* Opcode VEX.F2.0F 0xf6 - invalid */
5567
5568/* Opcode VEX.0F 0xf7 - invalid */
5569
5570
5571/** Opcode VEX.66.0F 0xf7 - vmaskmovdqu Vdq, Udq */
5572FNIEMOP_DEF(iemOp_vmaskmovdqu_Vdq_Udq)
5573{
5574// IEMOP_MNEMONIC2(RM, VMASKMOVDQU, vmaskmovdqu, Vdq, Udq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
5575 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5576 if (IEM_IS_MODRM_REG_MODE(bRm))
5577 {
5578 /*
5579 * XMM, XMM, (implicit) [E/R]DI
5580 */
5581 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5582 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5583 IEM_MC_LOCAL( uint64_t, u64EffAddr);
5584 IEM_MC_LOCAL( RTUINT128U, u128Mem);
5585 IEM_MC_ARG_LOCAL_REF(PRTUINT128U, pu128Mem, u128Mem, 0);
5586 IEM_MC_ARG( PCRTUINT128U, puSrc, 1);
5587 IEM_MC_ARG( PCRTUINT128U, puMsk, 2);
5588 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5589 IEM_MC_PREPARE_AVX_USAGE();
5590
5591 IEM_MC_FETCH_GREG_U64(u64EffAddr, X86_GREG_xDI);
5592 IEM_MC_FETCH_MEM_U128(u128Mem, pVCpu->iem.s.iEffSeg, u64EffAddr);
5593 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
5594 IEM_MC_REF_XREG_U128_CONST(puMsk, IEM_GET_MODRM_RM(pVCpu, bRm));
5595 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_maskmovdqu_u128, pu128Mem, puSrc, puMsk);
5596 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, u64EffAddr, u128Mem);
5597
5598 IEM_MC_ADVANCE_RIP_AND_FINISH();
5599 IEM_MC_END();
5600 }
5601 else
5602 {
5603 /* The memory, register encoding is invalid. */
5604 IEMOP_RAISE_INVALID_OPCODE_RET();
5605 }
5606}
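
/* Rough model (illustrative only, compiled out) of the read-modify-write the
   worker above performs via iemAImpl_maskmovdqu_u128: only bytes whose mask
   MSB is set are replaced in the 16 bytes at [E/R]DI. */
#if 0 /* illustrative only, not built */
static void sketchMaskMovDqu(RTUINT128U *pu128Mem, PCRTUINT128U puSrc, PCRTUINT128U puMsk)
{
    for (unsigned iByte = 0; iByte < 16; iByte++)
        if (puMsk->au8[iByte] & 0x80)  /* mask MSB selects the byte */
            pu128Mem->au8[iByte] = puSrc->au8[iByte];
}
#endif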
5607
5608
5609/* Opcode VEX.F2.0F 0xf7 - invalid */
5610
5611/* Opcode VEX.0F 0xf8 - invalid */
5612
5613
5614/** Opcode VEX.66.0F 0xf8 - vpsubb Vx, Hx, Wx */
5615FNIEMOP_DEF(iemOp_vpsubb_Vx_Hx_Wx)
5616{
5617 IEMOP_MNEMONIC3(VEX_RVM, VPSUBB, vpsubb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5618 IEMOPMEDIAOPTF3_INIT_VARS( vpsubb);
5619 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5620}
5621
5622
5623/* Opcode VEX.F2.0F 0xf8 - invalid */
5624
5625/* Opcode VEX.0F 0xf9 - invalid */
5626
5627
5628/** Opcode VEX.66.0F 0xf9 - vpsubw Vx, Hx, Wx */
5629FNIEMOP_DEF(iemOp_vpsubw_Vx_Hx_Wx)
5630{
5631 IEMOP_MNEMONIC3(VEX_RVM, VPSUBW, vpsubw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5632 IEMOPMEDIAOPTF3_INIT_VARS( vpsubw);
5633 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5634}
5635
5636
5637/* Opcode VEX.F2.0F 0xf9 - invalid */
5638
5639/* Opcode VEX.0F 0xfa - invalid */
5640
5641
5642/** Opcode VEX.66.0F 0xfa - vpsubd Vx, Hx, Wx */
5643FNIEMOP_DEF(iemOp_vpsubd_Vx_Hx_Wx)
5644{
5645 IEMOP_MNEMONIC3(VEX_RVM, VPSUBD, vpsubd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5646 IEMOPMEDIAOPTF3_INIT_VARS( vpsubd);
5647 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5648}
5649
5650
5651/* Opcode VEX.F2.0F 0xfa - invalid */
5652
5653/* Opcode VEX.0F 0xfb - invalid */
5654
5655
5656/** Opcode VEX.66.0F 0xfb - vpsubq Vx, Hx, Wx */
5657FNIEMOP_DEF(iemOp_vpsubq_Vx_Hx_Wx)
5658{
5659 IEMOP_MNEMONIC3(VEX_RVM, VPSUBQ, vpsubq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5660 IEMOPMEDIAOPTF3_INIT_VARS( vpsubq);
5661 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5662}
5663
5664
5665/* Opcode VEX.F2.0F 0xfb - invalid */
5666
5667/* Opcode VEX.0F 0xfc - invalid */
5668
5669
5670/** Opcode VEX.66.0F 0xfc - vpaddb Vx, Hx, Wx */
5671FNIEMOP_DEF(iemOp_vpaddb_Vx_Hx_Wx)
5672{
5673 IEMOP_MNEMONIC3(VEX_RVM, VPADDB, vpaddb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5674 IEMOPMEDIAOPTF3_INIT_VARS( vpaddb);
5675 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5676}
5677
5678
5679/* Opcode VEX.F2.0F 0xfc - invalid */
5680
5681/* Opcode VEX.0F 0xfd - invalid */
5682
5683
5684/** Opcode VEX.66.0F 0xfd - vpaddw Vx, Hx, Wx */
5685FNIEMOP_DEF(iemOp_vpaddw_Vx_Hx_Wx)
5686{
5687 IEMOP_MNEMONIC3(VEX_RVM, VPADDW, vpaddw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5688 IEMOPMEDIAOPTF3_INIT_VARS( vpaddw);
5689 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5690}
5691
5692
5693/* Opcode VEX.F2.0F 0xfd - invalid */
5694
5695/* Opcode VEX.0F 0xfe - invalid */
5696
5697
5698/** Opcode VEX.66.0F 0xfe - vpaddd Vx, Hx, Wx */
5699FNIEMOP_DEF(iemOp_vpaddd_Vx_Hx_Wx)
5700{
5701 IEMOP_MNEMONIC3(VEX_RVM, VPADDD, vpaddd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5702 IEMOPMEDIAOPTF3_INIT_VARS( vpaddd);
5703 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5704}
5705
5706
5707/* Opcode VEX.F2.0F 0xfe - invalid */
5708
5709
5710/** Opcode **** 0x0f 0xff - UD0 */
5711FNIEMOP_DEF(iemOp_vud0)
5712{
5713/** @todo testcase: vud0 */
5714 IEMOP_MNEMONIC(vud0, "vud0");
5715 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
5716 {
5717 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
5718 if (IEM_IS_MODRM_MEM_MODE(bRm))
5719 IEM_OPCODE_SKIP_RM_EFF_ADDR_BYTES(bRm);
5720 }
5721 IEMOP_HLP_DONE_DECODING();
5722 IEMOP_RAISE_INVALID_OPCODE_RET();
5723}
5724
5725
5726
5727/**
5728 * VEX opcode map \#1.
5729 *
5730 * @sa g_apfnTwoByteMap
5731 */
5732const PFNIEMOP g_apfnVexMap1[] =
5733{
5734 /* no prefix, 066h prefix, f3h prefix, f2h prefix */
5735 /* 0x00 */ IEMOP_X4(iemOp_InvalidNeedRM),
5736 /* 0x01 */ IEMOP_X4(iemOp_InvalidNeedRM),
5737 /* 0x02 */ IEMOP_X4(iemOp_InvalidNeedRM),
5738 /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRM),
5739 /* 0x04 */ IEMOP_X4(iemOp_InvalidNeedRM),
5740 /* 0x05 */ IEMOP_X4(iemOp_InvalidNeedRM),
5741 /* 0x06 */ IEMOP_X4(iemOp_InvalidNeedRM),
5742 /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRM),
5743 /* 0x08 */ IEMOP_X4(iemOp_InvalidNeedRM),
5744 /* 0x09 */ IEMOP_X4(iemOp_InvalidNeedRM),
5745 /* 0x0a */ IEMOP_X4(iemOp_InvalidNeedRM),
5746 /* 0x0b */ IEMOP_X4(iemOp_vud2), /* ?? */
5747 /* 0x0c */ IEMOP_X4(iemOp_InvalidNeedRM),
5748 /* 0x0d */ IEMOP_X4(iemOp_InvalidNeedRM),
5749 /* 0x0e */ IEMOP_X4(iemOp_InvalidNeedRM),
5750 /* 0x0f */ IEMOP_X4(iemOp_InvalidNeedRM),
5751
5752 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vss_Hss_Wss, iemOp_vmovsd_Vsd_Hsd_Wsd,
5753 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hss_Vss, iemOp_vmovsd_Wsd_Hsd_Vsd,
5754 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
5755 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5756 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5757 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5758 /* 0x16 */ iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpd_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
5759 /* 0x17 */ iemOp_vmovhps_Mq_Vq, iemOp_vmovhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5760 /* 0x18 */ IEMOP_X4(iemOp_InvalidNeedRM),
5761 /* 0x19 */ IEMOP_X4(iemOp_InvalidNeedRM),
5762 /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRM),
5763 /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),
5764 /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRM),
5765 /* 0x1d */ IEMOP_X4(iemOp_InvalidNeedRM),
5766 /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRM),
5767 /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),
5768
5769 /* 0x20 */ IEMOP_X4(iemOp_InvalidNeedRM),
5770 /* 0x21 */ IEMOP_X4(iemOp_InvalidNeedRM),
5771 /* 0x22 */ IEMOP_X4(iemOp_InvalidNeedRM),
5772 /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRM),
5773 /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRM),
5774 /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRM),
5775 /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),
5776 /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),
5777 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5778 /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5779 /* 0x2a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
5780 /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5781 /* 0x2c */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
5782 /* 0x2d */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
5783 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5784 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5785
5786 /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRM),
5787 /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRM),
5788 /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRM),
5789 /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRM),
5790 /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRM),
5791 /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRM),
5792 /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRM),
5793 /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRM),
5794 /* 0x38 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5795 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5796 /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5797 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5798 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5799 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5800 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5801 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
5802
5803 /* 0x40 */ IEMOP_X4(iemOp_InvalidNeedRM),
5804 /* 0x41 */ IEMOP_X4(iemOp_InvalidNeedRM),
5805 /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),
5806 /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),
5807 /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),
5808 /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRM),
5809 /* 0x46 */ IEMOP_X4(iemOp_InvalidNeedRM),
5810 /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRM),
5811 /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),
5812 /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),
5813 /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),
5814 /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),
5815 /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),
5816 /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),
5817 /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),
5818 /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),
5819
5820 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5821 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
5822 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
5823 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
5824 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5825 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5826 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5827 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5828 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
5829 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
5830 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
5831 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
5832 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
5833 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
5834 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
5835 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
5836
5837 /* 0x60 */ iemOp_InvalidNeedRM, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5838 /* 0x61 */ iemOp_InvalidNeedRM, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5839 /* 0x62 */ iemOp_InvalidNeedRM, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5840 /* 0x63 */ iemOp_InvalidNeedRM, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5841 /* 0x64 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5842 /* 0x65 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5843 /* 0x66 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5844 /* 0x67 */ iemOp_InvalidNeedRM, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5845 /* 0x68 */ iemOp_InvalidNeedRM, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5846 /* 0x69 */ iemOp_InvalidNeedRM, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5847 /* 0x6a */ iemOp_InvalidNeedRM, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5848 /* 0x6b */ iemOp_InvalidNeedRM, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5849 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5850 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5851 /* 0x6e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5852 /* 0x6f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,
5853
5854 /* 0x70 */ iemOp_InvalidNeedRM, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
5855 /* 0x71 */ iemOp_InvalidNeedRM, iemOp_VGrp12, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5856 /* 0x72 */ iemOp_InvalidNeedRM, iemOp_VGrp13, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5857 /* 0x73 */ iemOp_InvalidNeedRM, iemOp_VGrp14, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5858 /* 0x74 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5859 /* 0x75 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5860 /* 0x76 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5861 /* 0x77 */ iemOp_vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5862 /* 0x78 */ IEMOP_X4(iemOp_InvalidNeedRM),
5863 /* 0x79 */ IEMOP_X4(iemOp_InvalidNeedRM),
5864 /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),
5865 /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),
5866 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
5867 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
5868 /* 0x7e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
5869 /* 0x7f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,
5870
5871 /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRM),
5872 /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRM),
5873 /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRM),
5874 /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRM),
5875 /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),
5876 /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),
5877 /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),
5878 /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),
5879 /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),
5880 /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),
5881 /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),
5882 /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),
5883 /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRM),
5884 /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),
5885 /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRM),
5886 /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),
5887
5888 /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRM),
5889 /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRM),
5890 /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRM),
5891 /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRM),
5892 /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRM),
5893 /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRM),
5894 /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRM),
5895 /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRM),
5896 /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRM),
5897 /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRM),
5898 /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRM),
5899 /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRM),
5900 /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRM),
5901 /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRM),
5902 /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRM),
5903 /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRM),
5904
5905 /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRM),
5906 /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRM),
5907 /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRM),
5908 /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRM),
5909 /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRM),
5910 /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRM),
5911 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
5912 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
5913 /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRM),
5914 /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRM),
5915 /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRM),
5916 /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRM),
5917 /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRM),
5918 /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRM),
5919 /* 0xae */ IEMOP_X4(iemOp_VGrp15),
5920 /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRM),
5921
5922 /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRM),
5923 /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRM),
5924 /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRM),
5925 /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRM),
5926 /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRM),
5927 /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRM),
5928 /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRM),
5929 /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRM),
5930 /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRM),
5931 /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRM),
5932 /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRM),
5933 /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRM),
5934 /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRM),
5935 /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRM),
5936 /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRM),
5937 /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRM),
5938
5939 /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRM),
5940 /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRM),
5941 /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
5942 /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRM),
5943 /* 0xc4 */ iemOp_InvalidNeedRM, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
5944 /* 0xc5 */ iemOp_InvalidNeedRM, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
5945 /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
5946 /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRM),
5947 /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRM),
5948 /* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRM),
5949 /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRM),
5950 /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRM),
5951 /* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRM),
5952 /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRM),
5953 /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRM),
5954 /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRM),
5955
5956 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
5957 /* 0xd1 */ iemOp_InvalidNeedRM, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5958 /* 0xd2 */ iemOp_InvalidNeedRM, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5959 /* 0xd3 */ iemOp_InvalidNeedRM, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5960 /* 0xd4 */ iemOp_InvalidNeedRM, iemOp_vpaddq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5961 /* 0xd5 */ iemOp_InvalidNeedRM, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5962 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5963 /* 0xd7 */ iemOp_InvalidNeedRM, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5964 /* 0xd8 */ iemOp_InvalidNeedRM, iemOp_vpsubusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5965 /* 0xd9 */ iemOp_InvalidNeedRM, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5966 /* 0xda */ iemOp_InvalidNeedRM, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5967 /* 0xdb */ iemOp_InvalidNeedRM, iemOp_vpand_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5968 /* 0xdc */ iemOp_InvalidNeedRM, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5969 /* 0xdd */ iemOp_InvalidNeedRM, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5970 /* 0xde */ iemOp_InvalidNeedRM, iemOp_vpmaxub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5971 /* 0xdf */ iemOp_InvalidNeedRM, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5972
5973 /* 0xe0 */ iemOp_InvalidNeedRM, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5974 /* 0xe1 */ iemOp_InvalidNeedRM, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5975 /* 0xe2 */ iemOp_InvalidNeedRM, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5976 /* 0xe3 */ iemOp_InvalidNeedRM, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5977 /* 0xe4 */ iemOp_InvalidNeedRM, iemOp_vpmulhuw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5978 /* 0xe5 */ iemOp_InvalidNeedRM, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5979 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
5980 /* 0xe7 */ iemOp_InvalidNeedRM, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5981 /* 0xe8 */ iemOp_InvalidNeedRM, iemOp_vpsubsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5982 /* 0xe9 */ iemOp_InvalidNeedRM, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5983 /* 0xea */ iemOp_InvalidNeedRM, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5984 /* 0xeb */ iemOp_InvalidNeedRM, iemOp_vpor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5985 /* 0xec */ iemOp_InvalidNeedRM, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5986 /* 0xed */ iemOp_InvalidNeedRM, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5987 /* 0xee */ iemOp_InvalidNeedRM, iemOp_vpmaxsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5988 /* 0xef */ iemOp_InvalidNeedRM, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5989
5990 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
5991 /* 0xf1 */ iemOp_InvalidNeedRM, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5992 /* 0xf2 */ iemOp_InvalidNeedRM, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5993 /* 0xf3 */ iemOp_InvalidNeedRM, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5994 /* 0xf4 */ iemOp_InvalidNeedRM, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5995 /* 0xf5 */ iemOp_InvalidNeedRM, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5996 /* 0xf6 */ iemOp_InvalidNeedRM, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5997 /* 0xf7 */ iemOp_InvalidNeedRM, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5998 /* 0xf8 */ iemOp_InvalidNeedRM, iemOp_vpsubb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
5999 /* 0xf9 */ iemOp_InvalidNeedRM, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6000 /* 0xfa */ iemOp_InvalidNeedRM, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6001 /* 0xfb */ iemOp_InvalidNeedRM, iemOp_vpsubq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6002 /* 0xfc */ iemOp_InvalidNeedRM, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6003 /* 0xfd */ iemOp_InvalidNeedRM, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6004 /* 0xfe */ iemOp_InvalidNeedRM, iemOp_vpaddd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
6005 /* 0xff */ IEMOP_X4(iemOp_vud0) /* ?? */
6006};
6007AssertCompile(RT_ELEMENTS(g_apfnVexMap1) == 1024);
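
/* How the table is meant to be indexed (a sketch under the assumption that
   the decoder treats it like the two-byte map): four entries per opcode byte,
   one per SIMD prefix column in the order none, 0x66, 0xf3, 0xf2, hence the
   1024 entry count asserted above.  The helper name is illustrative only. */
#if 0 /* illustrative only, not built */
static PFNIEMOP sketchLookupVexMap1(uint8_t bOpcode, unsigned idxSimdPrefix /* 0..3 */)
{
    return g_apfnVexMap1[((size_t)bOpcode << 2) | idxSimdPrefix];
}
#endif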
6008/** @} */
6009