VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstVexMap1.cpp.h @ r105834

Last change: r105834 (2024-08-23), checked in by vboxsync:
Fix emulation of 'vmovq' instruction in 16/32-bit modes (acts as 'vmovd'); bugref:9898

/* $Id: IEMAllInstVexMap1.cpp.h 105834 2024-08-23 01:15:49Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstTwoByte0f.cpp.h is a legacy mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/** @name VEX Opcode Map 1
 * @{
 */

/**
 * Common worker for AVX2 instructions on the forms:
 *     - vpxxx xmm0, xmm1, xmm2/mem128
 *     - vpxxx ymm0, ymm1, ymm2/mem256
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, PCIEMOPMEDIAF3, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(X86YMMREG, uSrc1);
            IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc1, uSrc1, 1);
            IEM_MC_FETCH_YREG_YMM(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_LOCAL(X86YMMREG, uSrc2);
            IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc2, uSrc2, 2);
            IEM_MC_FETCH_YREG_YMM(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_LOCAL(X86YMMREG, uDst);
            IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
            IEM_MC_STORE_YREG_YMM_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(X86XMMREG, uDst);
            IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
            IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
            IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_ARG(PCX86XMMREG, puSrc2, 2);
            IEM_MC_REF_XREG_XMM_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(X86YMMREG, uSrc2);
            IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc2, uSrc2, 2);
            IEM_MC_FETCH_MEM_YMM_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_LOCAL(X86YMMREG, uSrc1);
            IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc1, uSrc1, 1);
            IEM_MC_FETCH_YREG_YMM(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_LOCAL(X86YMMREG, uDst);
            IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
            IEM_MC_STORE_YREG_YMM_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(X86XMMREG, uDst);
            IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
            IEM_MC_LOCAL(X86XMMREG, uSrc2);
            IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc2, uSrc2, 2);
            IEM_MC_FETCH_MEM_XMM_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
            IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));

            IEM_MC_CALL_AVX_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
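
/*
 * Usage sketch (editorial, illustrative only - not part of the opcode tables):
 * a packed AVX handler typically pairs this worker with a host/fallback
 * IEMOPMEDIAF3 table.  The handler name and the IEMOPMEDIAF3_INIT_VARS macro
 * below are assumed by analogy with the IEMOPMEDIAOPTF3_INIT_VARS uses later
 * in this file.
 *
 *     FNIEMOP_DEF(iemOp_vxxxps_Vps_Hps_Wps)   // hypothetical mnemonic
 *     {
 *         IEMOPMEDIAF3_INIT_VARS(vxxxps);
 *         return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx,
 *                               IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
 *     }
 */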


/**
 * Common worker for scalar AVX/AVX2 instructions on the forms (addss, subss, etc.):
 *     - vxxxss xmm0, xmm1, xmm2/mem32
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation.
 * Ignores VEX.L, from SDM:
 *     Software should ensure VADDSS is encoded with VEX.L=0.
 *     Encoding VADDSS with VEX.L=1 may encounter unpredictable behavior
 *     across different processor generations.
 */
FNIEMOP_DEF_1(iemOpCommonAvx_Vx_Hx_R32, PFNIEMAIMPLFPAVXF3U128R32, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_LOCAL(X86XMMREG, uDst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_ARG(PCRTFLOAT32U, pr32Src2, 2);
        IEM_MC_REF_XREG_R32_CONST(pr32Src2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_AVX_AIMPL_3(pfnU128, puDst, puSrc1, pr32Src2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_LOCAL(RTFLOAT32U, r32Src2);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src2, r32Src2, 2);
        IEM_MC_FETCH_MEM_R32(r32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_LOCAL(X86XMMREG, uDst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_CALL_AVX_AIMPL_3(pfnU128, puDst, puSrc1, pr32Src2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
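
/*
 * Usage sketch (editorial, illustrative only): scalar handlers pass the
 * 128-bit assembly helper pair straight in.  The helper and handler names
 * below are hypothetical, following the iemAImpl_* naming convention.
 *
 *     FNIEMOP_DEF(iemOp_vxxxss_Vss_Hss_Wss)   // hypothetical mnemonic
 *     {
 *         return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
 *                               IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vxxxss_u128_r32,
 *                                                           iemAImpl_vxxxss_u128_r32_fallback));
 *     }
 */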


/**
 * Common worker for scalar AVX/AVX2 instructions on the forms (addsd, subsd, etc.):
 *     - vxxxsd xmm0, xmm1, xmm2/mem64
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation.
 * Ignores VEX.L, from SDM:
 *     Software should ensure VADDSD is encoded with VEX.L=0.
 *     Encoding VADDSD with VEX.L=1 may encounter unpredictable behavior
 *     across different processor generations.
 */
FNIEMOP_DEF_1(iemOpCommonAvx_Vx_Hx_R64, PFNIEMAIMPLFPAVXF3U128R64, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_LOCAL(X86XMMREG, uDst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_ARG(PCRTFLOAT64U, pr64Src2, 2);
        IEM_MC_REF_XREG_R64_CONST(pr64Src2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_AVX_AIMPL_3(pfnU128, puDst, puSrc1, pr64Src2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_PREPARE_AVX_USAGE();

        IEM_MC_LOCAL(RTFLOAT64U, r64Src2);
        IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src2, r64Src2, 2);
        IEM_MC_FETCH_MEM_R64(r64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_LOCAL(X86XMMREG, uDst);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
        IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
        IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
        IEM_MC_CALL_AVX_AIMPL_3(pfnU128, puDst, puSrc1, pr64Src2);
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for AVX2 instructions on the forms:
 *     - vpxxx xmm0, xmm1, xmm2/mem128
 *     - vpxxx ymm0, ymm1, ymm2/mem256
 *
 * Takes function table for function w/o implicit state parameter.
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, PCIEMOPMEDIAOPTF3, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
            IEM_MC_ARG(PCRTUINT128U, puSrc2, 2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc1);
            IEM_MC_LOCAL(RTUINT256U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU256, puDst, puSrc1, puSrc2);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U, uSrc2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc1, 1);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnU128, puDst, puSrc1, puSrc2);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
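
/*
 * Usage sketch (editorial, illustrative only): handlers feed this worker an
 * IEMOPMEDIAOPTF3 table, exactly the way vunpcklps does further down in this
 * file; only the mnemonic below is hypothetical.
 *
 *     FNIEMOP_DEF(iemOp_vpxxx_Vx_Hx_Wx)   // hypothetical mnemonic
 *     {
 *         IEMOPMEDIAOPTF3_INIT_VARS(vpxxx);
 *         return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
 *                               IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
 *     }
 */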


/**
 * Common worker for AVX2 instructions on the forms:
 *     - vpunpckhxx xmm0, xmm1, xmm2/mem128
 *     - vpunpckhxx ymm0, ymm1, ymm2/mem256
 *
 * The 128-bit memory version of this instruction may elect to skip fetching the
 * lower 64 bits of the operand. We, however, do not.
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, PCIEMOPMEDIAOPTF3, pImpl)
{
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, pImpl);
}


/**
 * Common worker for AVX2 instructions on the forms:
 *     - vpunpcklxx xmm0, xmm1, xmm2/mem128
 *     - vpunpcklxx ymm0, ymm1, ymm2/mem256
 *
 * The 128-bit memory version of this instruction may elect to skip fetching the
 * higher 64 bits of the operand. We, however, do not.
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, PCIEMOPMEDIAOPTF3, pImpl)
{
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, pImpl);
}
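
/*
 * Editorial note: the "may elect to skip fetching" remarks above describe a
 * possible optimization, not current behavior.  The 128-bit unpack forms only
 * consume one qword of the memory operand, so a specialized worker could
 * fetch just that half (e.g. a single IEM_MC_FETCH_MEM_U64 instead of the
 * full 128-bit read).  Both wrappers deliberately forward to the generic
 * _Opt worker instead, trading that micro-optimization for shared code.
 */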


/**
 * Common worker for AVX2 instructions on the forms:
 *     - vpxxx xmm0, xmm1/mem128
 *     - vpxxx ymm0, ymm1/mem256
 *
 * Takes function table for function w/o implicit state parameter.
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Wx_Opt, PCIEMOPMEDIAOPTF2, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT256U, uDst);
            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
            IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}


/**
 * Common worker for AVX/AVX2 instructions on the forms:
 *     - vpxxx xmm0, xmm1/mem128
 *     - vpxxx ymm0, ymm1/mem256
 *
 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
 */
FNIEMOP_DEF_1(iemOpCommonAvxAvx2_Vx_Wx, PCIEMOPMEDIAF2, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(X86YMMREG, uSrc);
            IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc, uSrc, 1);
            IEM_MC_FETCH_YREG_YMM(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_LOCAL(X86YMMREG, uDst);
            IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
            IEM_MC_CALL_AVX_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
            IEM_MC_STORE_YREG_YMM_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(X86XMMREG, uDst);
            IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
            IEM_MC_ARG(PCX86XMMREG, puSrc, 1);
            IEM_MC_REF_XREG_XMM_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_CALL_AVX_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(X86YMMREG, uSrc);
            IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc, uSrc, 1);
            IEM_MC_FETCH_MEM_YMM_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_LOCAL(X86YMMREG, uDst);
            IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
            IEM_MC_CALL_AVX_AIMPL_2(pImpl->pfnU256, puDst, puSrc);
            IEM_MC_STORE_YREG_YMM_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(X86XMMREG, uDst);
            IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
            IEM_MC_LOCAL(X86XMMREG, uSrc);
            IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc, uSrc, 1);
            IEM_MC_FETCH_MEM_XMM_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_CALL_AVX_AIMPL_2(pImpl->pfnU128, puDst, puSrc);
            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}
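
/*
 * Usage sketch (editorial, illustrative only): full-width handlers would pair
 * this worker with an IEMOPMEDIAF2 host/fallback table.  Both the handler name
 * and the IEMOPMEDIAF2_INIT_VARS macro are assumptions made by analogy with
 * the IEMOPMEDIAOPTF3_INIT_VARS uses later in this file.
 *
 *     FNIEMOP_DEF(iemOp_vxxxps_Vps_Wps)   // hypothetical mnemonic
 *     {
 *         IEMOPMEDIAF2_INIT_VARS(vxxxps);
 *         return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx,
 *                               IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
 *     }
 */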



/* Opcode VEX.0F 0x00 - invalid */
/* Opcode VEX.0F 0x01 - invalid */
/* Opcode VEX.0F 0x02 - invalid */
/* Opcode VEX.0F 0x03 - invalid */
/* Opcode VEX.0F 0x04 - invalid */
/* Opcode VEX.0F 0x05 - invalid */
/* Opcode VEX.0F 0x06 - invalid */
/* Opcode VEX.0F 0x07 - invalid */
/* Opcode VEX.0F 0x08 - invalid */
/* Opcode VEX.0F 0x09 - invalid */
/* Opcode VEX.0F 0x0a - invalid */

/** Opcode VEX.0F 0x0b. */
FNIEMOP_DEF(iemOp_vud2)
{
    IEMOP_MNEMONIC(vud2, "vud2");
    IEMOP_RAISE_INVALID_OPCODE_RET();
}

/* Opcode VEX.0F 0x0c - invalid */
/* Opcode VEX.0F 0x0d - invalid */
/* Opcode VEX.0F 0x0e - invalid */
/* Opcode VEX.0F 0x0f - invalid */


/**
 * @opcode 0x10
 * @oppfx none
 * @opcpuid avx
 * @opgroup og_avx_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_vmovups_Vps_Wps)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVUPS, vmovups, Vps_WO, Wps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * 128-bit: Register, Memory
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * 256-bit: Register, Memory
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
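
/*
 * Editorial note: the *_ZX_VLMAX stores/copies above implement the VEX rule
 * that a 128-bit destination write zeroes bits 255:128 of the YMM register,
 * e.g. "vmovups xmm0, [mem]" clears ymm0[255:128], whereas the legacy SSE
 * "movups xmm0, [mem]" would leave those bits untouched.
 */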


/**
 * @opcode 0x10
 * @oppfx 0x66
 * @opcpuid avx
 * @opgroup og_avx_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_vmovupd_Vpd_Wpd)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVUPD, vmovupd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * 128-bit: Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * 256-bit: Register, memory.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


FNIEMOP_DEF(iemOp_vmovss_Vss_Hss_Wss)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x10
         * @oppfx 0xf3
         * @opcodesub 11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamerge
         * @opxcpttype 5
         * @optest op1=1 op2=0 op3=2 -> op1=2
         * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffea
         * @optest op1=3 op2=-1 op3=0x77 -> op1=-4294967177
         * @optest op1=3 op2=-2 op3=0x77 -> op1=-8589934473
         * @note HssHi refers to bits 127:32.
         */
        IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSS, vmovss, Vss_WO, HssHi, Uss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm) /*U32*/,
                                           IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x10
         * @oppfx 0xf3
         * @opcodesub !11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamove
         * @opxcpttype 5
         * @opfunction iemOp_vmovss_Vss_Hss_Wss
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-22 -> op1=-22
         */
        IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSS, vmovss, VssZx_WO, Md, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


FNIEMOP_DEF(iemOp_vmovsd_Vsd_Hsd_Wsd)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x10
         * @oppfx 0xf2
         * @opcodesub 11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamerge
         * @opxcpttype 5
         * @optest op1=1 op2=0 op3=2 -> op1=2
         * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
         * @optest op1=3 op2=-1 op3=0x77 ->
         *         op1=0xffffffffffffffff0000000000000077
         * @optest op1=3 op2=0x42 op3=0x77 -> op1=0x420000000000000077
         */
        IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVSD, vmovsd, Vsd_WO, HsdHi, Usd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_MODRM_RM(pVCpu, bRm) /*U64*/,
                                           IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x10
         * @oppfx 0xf2
         * @opcodesub !11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamove
         * @opxcpttype 5
         * @opfunction iemOp_vmovsd_Vsd_Hsd_Wsd
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-22 -> op1=-22
         */
        IEMOP_MNEMONIC2(VEX_RM_MEM, VMOVSD, vmovsd, VsdZx_WO, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * @opcode 0x11
 * @oppfx none
 * @opcpuid avx
 * @opgroup og_avx_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
{
    IEMOP_MNEMONIC2(VEX_MR, VMOVUPS, vmovups, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * 128-bit: Memory, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
        IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * 256-bit: Memory, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U256_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * @opcode 0x11
 * @oppfx 0x66
 * @opcpuid avx
 * @opgroup og_avx_simdfp_datamove
 * @opxcpttype 4UA
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-22 -> op1=-22
 */
FNIEMOP_DEF(iemOp_vmovupd_Wpd_Vpd)
{
    IEMOP_MNEMONIC2(VEX_MR, VMOVUPD, vmovupd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        if (pVCpu->iem.s.uVexLength == 0)
            IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm));
        else
            IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else if (pVCpu->iem.s.uVexLength == 0)
    {
        /*
         * 128-bit: Memory, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
        IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * 256-bit: Memory, register.
         */
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTUINT256U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U256_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


FNIEMOP_DEF(iemOp_vmovss_Wss_Hss_Vss)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x11
         * @oppfx 0xf3
         * @opcodesub 11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamerge
         * @opxcpttype 5
         * @optest op1=1 op2=0 op3=2 -> op1=2
         * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffea
         * @optest op1=3 op2=-1 op3=0x77 -> op1=-4294967177
         * @optest op1=3 op2=0x42 op3=0x77 -> op1=0x4200000077
         */
        IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSS, vmovss, Uss_WO, HssHi, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U32_U96_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm) /*U32*/,
                                           IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hss*/);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x11
         * @oppfx 0xf3
         * @opcodesub !11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamove
         * @opxcpttype 5
         * @opfunction iemOp_vmovss_Vss_Hss_Wss
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-22 -> op1=-22
         */
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSS, vmovss, Md_WO, Vss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hsd_Vsd)
{
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x11
         * @oppfx 0xf2
         * @opcodesub 11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamerge
         * @opxcpttype 5
         * @optest op1=1 op2=0 op3=2 -> op1=2
         * @optest op1=0 op2=0 op3=-22 -> op1=0xffffffffffffffea
         * @optest op1=3 op2=-1 op3=0x77 ->
         *         op1=0xffffffffffffffff0000000000000077
         * @optest op2=0x42 op3=0x77 -> op1=0x420000000000000077
         */
        IEMOP_MNEMONIC3(VEX_MVR_REG, VMOVSD, vmovsd, Usd_WO, HsdHi, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U64_U64_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
                                           IEM_GET_MODRM_REG(pVCpu, bRm),
                                           IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hsd*/);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x11
         * @oppfx 0xf2
         * @opcodesub !11 mr/reg
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamove
         * @opxcpttype 5
         * @opfunction iemOp_vmovsd_Wsd_Hsd_Vsd
         * @optest op1=1 op2=2 -> op1=2
         * @optest op1=0 op2=-22 -> op1=-22
         */
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVSD, vmovsd, Mq_WO, Vsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_IGNORED);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


FNIEMOP_DEF(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /**
         * @opcode 0x12
         * @opcodesub 11 mr/reg
         * @oppfx none
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamerge
         * @opxcpttype 7LZ
         * @optest op2=0x2200220122022203
         *         op3=0x3304330533063307
         *         -> op1=0x22002201220222033304330533063307
         * @optest op2=-1 op3=-42 -> op1=-42
         * @note op3 and op2 are only the 8-byte high XMM register halves.
         */
        IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVHLPS, vmovhlps, Vq_WO, HqHi, UqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);

        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
        IEM_MC_MERGE_YREG_U64HI_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                               IEM_GET_MODRM_RM(pVCpu, bRm),
                                               IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /**
         * @opdone
         * @opcode 0x12
         * @opcodesub !11 mr/reg
         * @oppfx none
         * @opcpuid avx
         * @opgroup og_avx_simdfp_datamove
         * @opxcpttype 5LZ
         * @opfunction iemOp_vmovlps_Vq_Hq_Mq__vmovhlps
         * @optest op1=1 op2=0 op3=0 -> op1=0
         * @optest op1=0 op2=-1 op3=-1 -> op1=-1
         * @optest op1=1 op2=2 op3=3 -> op1=0x20000000000000003
         * @optest op2=-1 op3=0x42 -> op1=0xffffffffffffffff0000000000000042
         */
        IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPS, vmovlps, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_MERGE_YREG_U64LOCAL_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                                  uSrc,
                                                  IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * @opcode 0x12
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamerge
 * @opxcpttype 5LZ
 * @optest op2=0 op3=2 -> op1=2
 * @optest op2=0x22 op3=0x33 -> op1=0x220000000000000033
 * @optest op2=0xfffffff0fffffff1 op3=0xeeeeeee8eeeeeee9
 *         -> op1=0xfffffff0fffffff1eeeeeee8eeeeeee9
 */
FNIEMOP_DEF(iemOp_vmovlpd_Vq_Hq_Mq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVLPD, vmovlpd, Vq_WO, HqHi, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_MERGE_YREG_U64LOCAL_U64HI_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
                                                  uSrc,
                                                  IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic udvex660f12m3
     * @opcode 0x12
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid avx
     * @optest ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}


/**
 * @opcode 0x12
 * @oppfx 0xf3
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamove
 * @opxcpttype 4
 * @optest vex.l==0 / op1=-1 op2=0xdddddddd00000002eeeeeeee00000001
 *         -> op1=0x00000002000000020000000100000001
 * @optest vex.l==1 /
 *         op2=0xbbbbbbbb00000004cccccccc00000003dddddddd00000002eeeeeeee00000001
 *         -> op1=0x0000000400000004000000030000000300000002000000020000000100000001
 */
FNIEMOP_DEF(iemOp_vmovsldup_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVSLDUP, vmovsldup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
    Assert(pVCpu->iem.s.uVexLength <= 1);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_LOCAL(RTUINT128U, uSrc);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 4, uSrc, 4);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 5, uSrc, 4);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 6, uSrc, 6);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 7, uSrc, 6);
            IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTUINT128U, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
            IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 0);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 0);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 2);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 2);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 4, uSrc, 4);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 5, uSrc, 4);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 6, uSrc, 6);
            IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 7, uSrc, 6);
            IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}


/**
 * @opcode 0x12
 * @oppfx 0xf2
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamove
 * @opxcpttype 5
 * @optest vex.l==0 / op2=0xddddddddeeeeeeee2222222211111111
 *         -> op1=0x22222222111111112222222211111111
 * @optest vex.l==1 / op2=0xbbbbbbbbcccccccc4444444433333333ddddddddeeeeeeee2222222211111111
 *         -> op1=0x4444444433333333444444443333333322222222111111112222222211111111
 */
FNIEMOP_DEF(iemOp_vmovddup_Vx_Wx)
{
    IEMOP_MNEMONIC2(VEX_RM, VMOVDDUP, vmovddup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_LOCAL(uint64_t, uSrc);

            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
            IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
            IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(uint64_t, uSrc1);
            IEM_MC_LOCAL(uint64_t, uSrc2);
            IEM_MC_FETCH_YREG_U64(uSrc1, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/);
            IEM_MC_FETCH_YREG_U64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 2 /* a_iQword*/);

            IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc1);
            IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc1);
            IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 2 /* a_iQword*/, uSrc2);
            IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 3 /* a_iQword*/, uSrc2);
            IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        if (pVCpu->iem.s.uVexLength == 0)
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint64_t, uSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc);
            IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc);
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
            IEM_MC_PREPARE_AVX_USAGE();

            IEM_MC_LOCAL(RTUINT256U, uSrc);
            IEM_MC_FETCH_MEM_U256(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

            IEM_MC_STORE_YREG_U64_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQwDst*/, uSrc, 0 /*a_iQwSrc*/);
            IEM_MC_STORE_YREG_U64_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQwDst*/, uSrc, 0 /*a_iQwSrc*/);
            IEM_MC_STORE_YREG_U64_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2 /*a_iQwDst*/, uSrc, 2 /*a_iQwSrc*/);
            IEM_MC_STORE_YREG_U64_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3 /*a_iQwDst*/, uSrc, 2 /*a_iQwSrc*/);
            IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));

            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}


/**
 * @opcode 0x13
 * @opcodesub !11 mr/reg
 * @oppfx none
 * @opcpuid avx
 * @opgroup og_avx_simdfp_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_vmovlps_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPS, vmovlps, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);

        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic udvex0f13m3
     * @opcode 0x13
     * @opcodesub 11 mr/reg
     * @oppfx none
     * @opunused immediate
     * @opcpuid avx
     * @optest ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}


/**
 * @opcode 0x13
 * @opcodesub !11 mr/reg
 * @oppfx 0x66
 * @opcpuid avx
 * @opgroup og_avx_pcksclr_datamove
 * @opxcpttype 5
 * @optest op1=1 op2=2 -> op1=2
 * @optest op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVLPD, vmovlpd, Mq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
        IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
        IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();

        IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic udvex660f13m3
     * @opcode 0x13
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid avx
     * @optest ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}

/* Opcode VEX.F3.0F 0x13 - invalid */
/* Opcode VEX.F2.0F 0x13 - invalid */

/** Opcode VEX.0F 0x14 - vunpcklps Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vunpcklps_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VUNPCKLPS, vunpcklps, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vunpcklps);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F 0x14 - vunpcklpd Vx,Hx,Wx */
FNIEMOP_DEF(iemOp_vunpcklpd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VUNPCKLPD, vunpcklpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vunpcklpd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/* Opcode VEX.F3.0F 0x14 - invalid */
/* Opcode VEX.F2.0F 0x14 - invalid */

/** Opcode VEX.0F 0x15 - vunpckhps Vx, Hx, Wx */
FNIEMOP_DEF(iemOp_vunpckhps_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VUNPCKHPS, vunpckhps, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vunpckhps);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}


/** Opcode VEX.66.0F 0x15 - vunpckhpd Vx,Hx,Wx */
FNIEMOP_DEF(iemOp_vunpckhpd_Vx_Hx_Wx)
{
    IEMOP_MNEMONIC3(VEX_RVM, VUNPCKHPD, vunpckhpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
    IEMOPMEDIAOPTF3_INIT_VARS(vunpckhpd);
    return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
}
1675
1676
1677/* Opcode VEX.F3.0F 0x15 - invalid */
1678/* Opcode VEX.F2.0F 0x15 - invalid */
1679
1680
1681FNIEMOP_DEF(iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq)
1682{
1683 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1684 if (IEM_IS_MODRM_REG_MODE(bRm))
1685 {
1686 /**
1687 * @opcode 0x16
1688 * @opcodesub 11 mr/reg
1689 * @oppfx none
1690 * @opcpuid avx
1691 * @opgroup og_avx_simdfp_datamerge
1692 * @opxcpttype 7LZ
1693 */
1694 IEMOP_MNEMONIC3(VEX_RVM_REG, VMOVLHPS, vmovlhps, Vq_WO, Hq, Uq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1695
1696 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1697 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1698
1699 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1700 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
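        /* VMOVLHPS: the result's low qword comes from Hq (VEX.vvvv) and the high
           qword from the low qword of Uq; bits 255:128 of the destination are zeroed. */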
1701 IEM_MC_MERGE_YREG_U64LO_U64LO_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1702 IEM_GET_MODRM_RM(pVCpu, bRm),
1703 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/);
1704
1705 IEM_MC_ADVANCE_RIP_AND_FINISH();
1706 IEM_MC_END();
1707 }
1708 else
1709 {
1710 /**
1711 * @opdone
1712 * @opcode 0x16
1713 * @opcodesub !11 mr/reg
1714 * @oppfx none
1715 * @opcpuid avx
1716 * @opgroup og_avx_simdfp_datamove
1717 * @opxcpttype 5LZ
1718 * @opfunction iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq
1719 */
1720 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVHPS, vmovhps, Vq_WO, Hq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1721
1722 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1723 IEM_MC_LOCAL(uint64_t, uSrc);
1724 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1725
1726 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1727 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1728 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1729 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1730
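        /* VMOVHPS: the low qword is copied from Hq and the high qword is loaded
           from the 64-bit memory operand; bits 255:128 of the destination are zeroed. */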
1731 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1732 IEM_MC_MERGE_YREG_U64LO_U64LOCAL_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1733 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/,
1734 uSrc);
1735
1736 IEM_MC_ADVANCE_RIP_AND_FINISH();
1737 IEM_MC_END();
1738 }
1739}
1740
1741
1742/**
1743 * @opcode 0x16
1744 * @opcodesub !11 mr/reg
1745 * @oppfx 0x66
1746 * @opcpuid avx
1747 * @opgroup og_avx_pcksclr_datamerge
1748 * @opxcpttype 5LZ
1749 */
1750FNIEMOP_DEF(iemOp_vmovhpd_Vdq_Hq_Mq)
1751{
1752 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1753 if (IEM_IS_MODRM_MEM_MODE(bRm))
1754 {
1755 IEMOP_MNEMONIC3(VEX_RVM_MEM, VMOVHPD, vmovhpd, Vq_WO, Hq, Mq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1756
1757 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1758 IEM_MC_LOCAL(uint64_t, uSrc);
1759 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1760
1761 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1762 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
1763 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1764 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
1765
1766 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1767 IEM_MC_MERGE_YREG_U64LO_U64LOCAL_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
1768 IEM_GET_EFFECTIVE_VVVV(pVCpu) /*Hq*/,
1769 uSrc);
1770
1771 IEM_MC_ADVANCE_RIP_AND_FINISH();
1772 IEM_MC_END();
1773 }
1774
1775 /**
1776 * @opdone
1777 * @opmnemonic udvex660f16m3
1778 * @opcode 0x16
1779 * @opcodesub 11 mr/reg
1780 * @oppfx 0x66
1781 * @opunused immediate
1782 * @opcpuid avx
1783 * @optest ->
1784 */
1785 else
1786 IEMOP_RAISE_INVALID_OPCODE_RET();
1787}
1788
1789
1790/** Opcode VEX.F3.0F 0x16 - vmovshdup Vx, Wx */
1791/**
1792 * @opcode 0x16
1793 * @oppfx 0xf3
1794 * @opcpuid avx
1795 * @opgroup og_avx_pcksclr_datamove
1796 * @opxcpttype 4
1797 */
1798FNIEMOP_DEF(iemOp_vmovshdup_Vx_Wx)
1799{
1800 IEMOP_MNEMONIC2(VEX_RM, VMOVSHDUP, vmovshdup, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
1801 Assert(pVCpu->iem.s.uVexLength <= 1);
1802 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1803 if (IEM_IS_MODRM_REG_MODE(bRm))
1804 {
1805 /*
1806 * Register, register.
1807 */
1808 if (pVCpu->iem.s.uVexLength == 0)
1809 {
1810 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1811 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1812 IEM_MC_LOCAL(RTUINT128U, uSrc);
1813
1814 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1815 IEM_MC_PREPARE_AVX_USAGE();
1816
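        /* VMOVSHDUP duplicates the odd-indexed source dwords into the even
           positions: dst[0]=dst[1]=src[1], dst[2]=dst[3]=src[3]. */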
1817 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1818 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
1819 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
1820 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
1821 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
1822 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1823
1824 IEM_MC_ADVANCE_RIP_AND_FINISH();
1825 IEM_MC_END();
1826 }
1827 else
1828 {
1829 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1830 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1831 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1832 IEM_MC_PREPARE_AVX_USAGE();
1833
1834 IEM_MC_LOCAL(RTUINT256U, uSrc);
1835 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1836 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
1837 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
1838 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
1839 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
1840 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 4, uSrc, 5);
1841 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 5, uSrc, 5);
1842 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 6, uSrc, 7);
1843 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 7, uSrc, 7);
1844 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1845
1846 IEM_MC_ADVANCE_RIP_AND_FINISH();
1847 IEM_MC_END();
1848 }
1849 }
1850 else
1851 {
1852 /*
1853 * Register, memory.
1854 */
1855 if (pVCpu->iem.s.uVexLength == 0)
1856 {
1857 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1858 IEM_MC_LOCAL(RTUINT128U, uSrc);
1859 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1860
1861 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1862 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1863 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1864 IEM_MC_PREPARE_AVX_USAGE();
1865
1866 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1867 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
1868 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
1869 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
1870 IEM_MC_STORE_XREG_U32_U128(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
1871 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1872
1873 IEM_MC_ADVANCE_RIP_AND_FINISH();
1874 IEM_MC_END();
1875 }
1876 else
1877 {
1878 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1879 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1880 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1881 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
1882 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1883 IEM_MC_PREPARE_AVX_USAGE();
1884
1885 IEM_MC_LOCAL(RTUINT256U, uSrc);
1886 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1887
1888 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc, 1);
1889 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc, 1);
1890 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc, 3);
1891 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc, 3);
1892 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 4, uSrc, 5);
1893 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 5, uSrc, 5);
1894 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 6, uSrc, 7);
1895 IEM_MC_STORE_YREG_U32_U256(IEM_GET_MODRM_REG(pVCpu, bRm), 7, uSrc, 7);
1896 IEM_MC_CLEAR_ZREG_256_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
1897
1898 IEM_MC_ADVANCE_RIP_AND_FINISH();
1899 IEM_MC_END();
1900 }
1901 }
1902}
1903
1904
1905/* Opcode VEX.F2.0F 0x16 - invalid */
1906
1907
1908/**
1909 * @opcode 0x17
1910 * @opcodesub !11 mr/reg
1911 * @oppfx none
1912 * @opcpuid avx
1913 * @opgroup og_avx_simdfp_datamove
1914 * @opxcpttype 5
1915 */
1916FNIEMOP_DEF(iemOp_vmovhps_Mq_Vq)
1917{
1918 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1919 if (IEM_IS_MODRM_MEM_MODE(bRm))
1920 {
1921 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVHPS, vmovhps, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1922
1923 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1924 IEM_MC_LOCAL(uint64_t, uSrc);
1925 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1926
1927 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1928 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
1929 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1930 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1931
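        /* Store the high qword (qword index 1) of the register to memory. */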
1932 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQWord*/);
1933 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1934
1935 IEM_MC_ADVANCE_RIP_AND_FINISH();
1936 IEM_MC_END();
1937 }
1938
1939 /**
1940 * @opdone
1941 * @opmnemonic udvex0f17m3
1942 * @opcode 0x17
1943 * @opcodesub 11 mr/reg
1944 * @oppfx none
1945 * @opunused immediate
1946 * @opcpuid avx
1947 * @optest ->
1948 */
1949 else
1950 IEMOP_RAISE_INVALID_OPCODE_RET();
1951}
1952
1953
1954/**
1955 * @opcode 0x17
1956 * @opcodesub !11 mr/reg
1957 * @oppfx 0x66
1958 * @opcpuid avx
1959 * @opgroup og_avx_pcksclr_datamove
1960 * @opxcpttype 5
1961 */
1962FNIEMOP_DEF(iemOp_vmovhpd_Mq_Vq)
1963{
1964 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1965 if (IEM_IS_MODRM_MEM_MODE(bRm))
1966 {
1967 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVHPD, vmovhpd, Mq_WO, VqHi, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
1968
1969 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
1970 IEM_MC_LOCAL(uint64_t, uSrc);
1971 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1972
1973 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1974 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
1975 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
1976 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
1977
1978 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /*a_iQWord*/);
1979 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
1980
1981 IEM_MC_ADVANCE_RIP_AND_FINISH();
1982 IEM_MC_END();
1983 }
1984
1985 /**
1986 * @opdone
1987 * @opmnemonic udvex660f17m3
1988 * @opcode 0x17
1989 * @opcodesub 11 mr/reg
1990 * @oppfx 0x66
1991 * @opunused immediate
1992 * @opcpuid avx
1993 * @optest ->
1994 */
1995 else
1996 IEMOP_RAISE_INVALID_OPCODE_RET();
1997}
1998
1999
2000/* Opcode VEX.F3.0F 0x17 - invalid */
2001/* Opcode VEX.F2.0F 0x17 - invalid */
2002
2003
2004/* Opcode VEX.0F 0x18 - invalid */
2005/* Opcode VEX.0F 0x19 - invalid */
2006/* Opcode VEX.0F 0x1a - invalid */
2007/* Opcode VEX.0F 0x1b - invalid */
2008/* Opcode VEX.0F 0x1c - invalid */
2009/* Opcode VEX.0F 0x1d - invalid */
2010/* Opcode VEX.0F 0x1e - invalid */
2011/* Opcode VEX.0F 0x1f - invalid */
2012
2013/* Opcode VEX.0F 0x20 - invalid */
2014/* Opcode VEX.0F 0x21 - invalid */
2015/* Opcode VEX.0F 0x22 - invalid */
2016/* Opcode VEX.0F 0x23 - invalid */
2017/* Opcode VEX.0F 0x24 - invalid */
2018/* Opcode VEX.0F 0x25 - invalid */
2019/* Opcode VEX.0F 0x26 - invalid */
2020/* Opcode VEX.0F 0x27 - invalid */
2021
2022/**
2023 * @opcode 0x28
2024 * @oppfx none
2025 * @opcpuid avx
2026 * @opgroup og_avx_pcksclr_datamove
2027 * @opxcpttype 1
2028 * @optest op1=1 op2=2 -> op1=2
2029 * @optest op1=0 op2=-42 -> op1=-42
2030 * @note Almost identical to vmovapd.
2031 */
2032FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
2033{
2034 IEMOP_MNEMONIC2(VEX_RM, VMOVAPS, vmovaps, Vps_WO, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2035 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2036 Assert(pVCpu->iem.s.uVexLength <= 1);
2037 if (IEM_IS_MODRM_REG_MODE(bRm))
2038 {
2039 /*
2040 * Register, register.
2041 */
2042 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2043 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2044
2045 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2046 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2047 if (pVCpu->iem.s.uVexLength == 0)
2048 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
2049 IEM_GET_MODRM_RM(pVCpu, bRm));
2050 else
2051 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
2052 IEM_GET_MODRM_RM(pVCpu, bRm));
2053 IEM_MC_ADVANCE_RIP_AND_FINISH();
2054 IEM_MC_END();
2055 }
2056 else
2057 {
2058 /*
2059 * Register, memory.
2060 */
2061 if (pVCpu->iem.s.uVexLength == 0)
2062 {
2063 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2064 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2065 IEM_MC_LOCAL(RTUINT128U, uSrc);
2066
2067 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2068 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2069 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2070 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2071
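            /* The _ALIGN_SSE fetch enforces the 16-byte alignment vmovaps
               requires, raising #GP(0) for a misaligned operand. */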
2072 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2073 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2074
2075 IEM_MC_ADVANCE_RIP_AND_FINISH();
2076 IEM_MC_END();
2077 }
2078 else
2079 {
2080 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2081 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2082 IEM_MC_LOCAL(RTUINT256U, uSrc);
2083
2084 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2085 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2086 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2087 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2088
2089 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2090 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2091
2092 IEM_MC_ADVANCE_RIP_AND_FINISH();
2093 IEM_MC_END();
2094 }
2095 }
2096}
2097
2098
2099/**
2100 * @opcode 0x28
2101 * @oppfx 66
2102 * @opcpuid avx
2103 * @opgroup og_avx_pcksclr_datamove
2104 * @opxcpttype 1
2105 * @optest op1=1 op2=2 -> op1=2
2106 * @optest op1=0 op2=-42 -> op1=-42
2107 * @note Almost identical to vmovaps
2108 */
2109FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
2110{
2111 IEMOP_MNEMONIC2(VEX_RM, VMOVAPD, vmovapd, Vpd_WO, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2112 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2113 Assert(pVCpu->iem.s.uVexLength <= 1);
2114 if (IEM_IS_MODRM_REG_MODE(bRm))
2115 {
2116 /*
2117 * Register, register.
2118 */
2119 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2120 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2121
2122 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2123 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2124 if (pVCpu->iem.s.uVexLength == 0)
2125 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
2126 IEM_GET_MODRM_RM(pVCpu, bRm));
2127 else
2128 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
2129 IEM_GET_MODRM_RM(pVCpu, bRm));
2130 IEM_MC_ADVANCE_RIP_AND_FINISH();
2131 IEM_MC_END();
2132 }
2133 else
2134 {
2135 /*
2136 * Register, memory.
2137 */
2138 if (pVCpu->iem.s.uVexLength == 0)
2139 {
2140 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2141 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2142 IEM_MC_LOCAL(RTUINT128U, uSrc);
2143
2144 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2145 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2146 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2147 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2148
2149 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2150 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2151
2152 IEM_MC_ADVANCE_RIP_AND_FINISH();
2153 IEM_MC_END();
2154 }
2155 else
2156 {
2157 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2158 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2159 IEM_MC_LOCAL(RTUINT256U, uSrc);
2160
2161 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2162 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2163 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2164 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2165
2166 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2167 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
2168
2169 IEM_MC_ADVANCE_RIP_AND_FINISH();
2170 IEM_MC_END();
2171 }
2172 }
2173}
2174
2175/**
2176 * @opmnemonic udvexf30f28
2177 * @opcode 0x28
2178 * @oppfx 0xf3
2179 * @opunused vex.modrm
2180 * @opcpuid avx
2181 * @optest ->
2182 * @opdone
2183 */
2184
2185/**
2186 * @opmnemonic udvexf20f28
2187 * @opcode 0x28
2188 * @oppfx 0xf2
2189 * @opunused vex.modrm
2190 * @opcpuid avx
2191 * @optest ->
2192 * @opdone
2193 */
2194
2195/**
2196 * @opcode 0x29
2197 * @oppfx none
2198 * @opcpuid avx
2199 * @opgroup og_avx_pcksclr_datamove
2200 * @opxcpttype 1
2201 * @optest op1=1 op2=2 -> op1=2
2202 * @optest op1=0 op2=-42 -> op1=-42
2203 * @note Almost identical to vmovapd.
2204 */
2205FNIEMOP_DEF(iemOp_vmovaps_Wps_Vps)
2206{
2207 IEMOP_MNEMONIC2(VEX_MR, VMOVAPS, vmovaps, Wps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2208 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2209 Assert(pVCpu->iem.s.uVexLength <= 1);
2210 if (IEM_IS_MODRM_REG_MODE(bRm))
2211 {
2212 /*
2213 * Register, register.
2214 */
2215 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2216 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2217
2218 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2219 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2220 if (pVCpu->iem.s.uVexLength == 0)
2221 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2222 IEM_GET_MODRM_REG(pVCpu, bRm));
2223 else
2224 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2225 IEM_GET_MODRM_REG(pVCpu, bRm));
2226 IEM_MC_ADVANCE_RIP_AND_FINISH();
2227 IEM_MC_END();
2228 }
2229 else
2230 {
2231 /*
2232 * Register, memory.
2233 */
2234 if (pVCpu->iem.s.uVexLength == 0)
2235 {
2236 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2237 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2238 IEM_MC_LOCAL(RTUINT128U, uSrc);
2239
2240 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2241 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2242 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2243 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2244
2245 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
2246 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2247
2248 IEM_MC_ADVANCE_RIP_AND_FINISH();
2249 IEM_MC_END();
2250 }
2251 else
2252 {
2253 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2254 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2255 IEM_MC_LOCAL(RTUINT256U, uSrc);
2256
2257 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2258 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2259 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2260 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2261
2262 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2263 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2264
2265 IEM_MC_ADVANCE_RIP_AND_FINISH();
2266 IEM_MC_END();
2267 }
2268 }
2269}
2270
2271/**
2272 * @opcode 0x29
2273 * @oppfx 66
2274 * @opcpuid avx
2275 * @opgroup og_avx_pcksclr_datamove
2276 * @opxcpttype 1
2277 * @optest op1=1 op2=2 -> op1=2
2278 * @optest op1=0 op2=-42 -> op1=-42
2279 * @note Almost identical to vmovaps
2280 */
2281FNIEMOP_DEF(iemOp_vmovapd_Wpd_Vpd)
2282{
2283 IEMOP_MNEMONIC2(VEX_MR, VMOVAPD, vmovapd, Wpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2284 Assert(pVCpu->iem.s.uVexLength <= 1);
2285 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2286 if (IEM_IS_MODRM_REG_MODE(bRm))
2287 {
2288 /*
2289 * Register, register.
2290 */
2291 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2292 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2293
2294 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2295 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2296 if (pVCpu->iem.s.uVexLength == 0)
2297 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2298 IEM_GET_MODRM_REG(pVCpu, bRm));
2299 else
2300 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
2301 IEM_GET_MODRM_REG(pVCpu, bRm));
2302 IEM_MC_ADVANCE_RIP_AND_FINISH();
2303 IEM_MC_END();
2304 }
2305 else
2306 {
2307 /*
2308 * Register, memory.
2309 */
2310 if (pVCpu->iem.s.uVexLength == 0)
2311 {
2312 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2313 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2314 IEM_MC_LOCAL(RTUINT128U, uSrc);
2315
2316 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2317 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2318 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2319 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2320
2321 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
2322 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2323
2324 IEM_MC_ADVANCE_RIP_AND_FINISH();
2325 IEM_MC_END();
2326 }
2327 else
2328 {
2329 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2330 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2331 IEM_MC_LOCAL(RTUINT256U, uSrc);
2332
2333 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2334 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2335 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2336 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
2337
2338 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2339 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2340
2341 IEM_MC_ADVANCE_RIP_AND_FINISH();
2342 IEM_MC_END();
2343 }
2344 }
2345}
2346
2347
2348/**
2349 * @opmnemonic udvexf30f29
2350 * @opcode 0x29
2351 * @oppfx 0xf3
2352 * @opunused vex.modrm
2353 * @opcpuid avx
2354 * @optest ->
2355 * @opdone
2356 */
2357
2358/**
2359 * @opmnemonic udvexf20f29
2360 * @opcode 0x29
2361 * @oppfx 0xf2
2362 * @opunused vex.modrm
2363 * @opcpuid avx
2364 * @optest ->
2365 * @opdone
2366 */
2367
2368
2369/** Opcode VEX.0F 0x2a - invalid */
2370/** Opcode VEX.66.0F 0x2a - invalid */
2371
2372
2373/** Opcode VEX.F3.0F 0x2a - vcvtsi2ss Vss, Hss, Ey */
2374FNIEMOP_DEF(iemOp_vcvtsi2ss_Vss_Hss_Ey)
2375{
2376 IEMOP_MNEMONIC3(VEX_RVM, VCVTSI2SS, vcvtsi2ss, Vps, Hps, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2377 IEMOP_HLP_IGNORE_VEX_W_PREFIX_IF_NOT_IN_64BIT();
2378 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
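    /* VEX.W=1 (only honoured in 64-bit mode, see above) selects a 64-bit
       integer source operand; otherwise the source is 32 bits. */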
2379 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2380 {
2381 if (IEM_IS_MODRM_REG_MODE(bRm))
2382 {
2383 /* XMM, greg64 */
2384 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2385 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2386 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2387 IEM_MC_PREPARE_AVX_USAGE();
2388
2389 IEM_MC_LOCAL(X86XMMREG, uDst);
2390 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
2391 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2392 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2393 IEM_MC_ARG(const int64_t *, pi64Src2, 2);
2394 IEM_MC_REF_GREG_I64_CONST(pi64Src2, IEM_GET_MODRM_RM(pVCpu, bRm));
2395 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsi2ss_u128_i64, iemAImpl_vcvtsi2ss_u128_i64_fallback),
2396 puDst, puSrc1, pi64Src2);
2397 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2398 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2399 IEM_MC_ADVANCE_RIP_AND_FINISH();
2400 IEM_MC_END();
2401 }
2402 else
2403 {
2404 /* XMM, [mem64] */
2405 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2406 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2407 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2408 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2409 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2410 IEM_MC_PREPARE_AVX_USAGE();
2411
2412 IEM_MC_LOCAL(X86XMMREG, uDst);
2413 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
2414 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2415 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2416 IEM_MC_LOCAL(int64_t, i64Src2);
2417 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src2, i64Src2, 2);
2418 IEM_MC_FETCH_MEM_I64(i64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2419 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsi2ss_u128_i64, iemAImpl_vcvtsi2ss_u128_i64_fallback),
2420 puDst, puSrc1, pi64Src2);
2421 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2422 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2423 IEM_MC_ADVANCE_RIP_AND_FINISH();
2424 IEM_MC_END();
2425 }
2426 }
2427 else
2428 {
2429 if (IEM_IS_MODRM_REG_MODE(bRm))
2430 {
2431 /* XMM, greg32 */
2432 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2433 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2434 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2435 IEM_MC_PREPARE_AVX_USAGE();
2436
2437 IEM_MC_LOCAL(X86XMMREG, uDst);
2438 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
2439 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2440 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2441 IEM_MC_ARG(const int32_t *, pi32Src2, 2);
2442 IEM_MC_REF_GREG_I32_CONST(pi32Src2, IEM_GET_MODRM_RM(pVCpu, bRm));
2443 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsi2ss_u128_i32, iemAImpl_vcvtsi2ss_u128_i32_fallback),
2444 puDst, puSrc1, pi32Src2);
2445 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2446 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2447 IEM_MC_ADVANCE_RIP_AND_FINISH();
2448 IEM_MC_END();
2449 }
2450 else
2451 {
2452 /* XMM, [mem32] */
2453 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2454 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2455 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2456 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2457 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2458 IEM_MC_PREPARE_AVX_USAGE();
2459
2460 IEM_MC_LOCAL(X86XMMREG, uDst);
2461 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
2462 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2463 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2464 IEM_MC_LOCAL(int32_t, i32Src2);
2465 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src2, i32Src2, 2);
2466 IEM_MC_FETCH_MEM_I32(i32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2467 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsi2ss_u128_i32, iemAImpl_vcvtsi2ss_u128_i32_fallback),
2468 puDst, puSrc1, pi32Src2);
2469 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2470 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2471 IEM_MC_ADVANCE_RIP_AND_FINISH();
2472 IEM_MC_END();
2473 }
2474 }
2475}
2476
2477
2478/** Opcode VEX.F2.0F 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
2479FNIEMOP_DEF(iemOp_vcvtsi2sd_Vsd_Hsd_Ey)
2480{
2481 IEMOP_MNEMONIC3(VEX_RVM, VCVTSI2SD, vcvtsi2sd, Vpd, Hpd, Ey, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2482 IEMOP_HLP_IGNORE_VEX_W_PREFIX_IF_NOT_IN_64BIT();
2483 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2484 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2485 {
2486 if (IEM_IS_MODRM_REG_MODE(bRm))
2487 {
2488 /* XMM, greg64 */
2489 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2490 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2491 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2492 IEM_MC_PREPARE_AVX_USAGE();
2493
2494 IEM_MC_LOCAL(X86XMMREG, uDst);
2495 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
2496 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2497 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2498 IEM_MC_ARG(const int64_t *, pi64Src2, 2);
2499 IEM_MC_REF_GREG_I64_CONST(pi64Src2, IEM_GET_MODRM_RM(pVCpu, bRm));
2500 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsi2sd_u128_i64, iemAImpl_vcvtsi2sd_u128_i64_fallback),
2501 puDst, puSrc1, pi64Src2);
2502 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2503 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2504 IEM_MC_ADVANCE_RIP_AND_FINISH();
2505 IEM_MC_END();
2506 }
2507 else
2508 {
2509 /* XMM, [mem64] */
2510 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
2511 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2512 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2513 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2514 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2515 IEM_MC_PREPARE_AVX_USAGE();
2516
2517 IEM_MC_LOCAL(X86XMMREG, uDst);
2518 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
2519 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2520 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2521 IEM_MC_LOCAL(int64_t, i64Src2);
2522 IEM_MC_ARG_LOCAL_REF(const int64_t *, pi64Src2, i64Src2, 2);
2523 IEM_MC_FETCH_MEM_I64(i64Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2524 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsi2sd_u128_i64, iemAImpl_vcvtsi2sd_u128_i64_fallback),
2525 puDst, puSrc1, pi64Src2);
2526 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2527 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2528 IEM_MC_ADVANCE_RIP_AND_FINISH();
2529 IEM_MC_END();
2530 }
2531 }
2532 else
2533 {
2534 if (IEM_IS_MODRM_REG_MODE(bRm))
2535 {
2536 /* XMM, greg32 */
2537 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2538 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2539 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2540 IEM_MC_PREPARE_AVX_USAGE();
2541
2542 IEM_MC_LOCAL(X86XMMREG, uDst);
2543 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
2544 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2545 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2546 IEM_MC_ARG(const int32_t *, pi32Src2, 2);
2547 IEM_MC_REF_GREG_I32_CONST(pi32Src2, IEM_GET_MODRM_RM(pVCpu, bRm));
2548 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsi2sd_u128_i32, iemAImpl_vcvtsi2sd_u128_i32_fallback),
2549 puDst, puSrc1, pi32Src2);
2550 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2551 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2552 IEM_MC_ADVANCE_RIP_AND_FINISH();
2553 IEM_MC_END();
2554 }
2555 else
2556 {
2557 /* XMM, [mem32] */
2558 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2559 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2560 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2561 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
2562 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2563 IEM_MC_PREPARE_AVX_USAGE();
2564
2565 IEM_MC_LOCAL(X86XMMREG, uDst);
2566 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
2567 IEM_MC_ARG(PCX86XMMREG, puSrc1, 1);
2568 IEM_MC_REF_XREG_XMM_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
2569 IEM_MC_LOCAL(int32_t, i32Src2);
2570 IEM_MC_ARG_LOCAL_REF(const int32_t *, pi32Src2, i32Src2, 2);
2571 IEM_MC_FETCH_MEM_I32(i32Src2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2572 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsi2sd_u128_i32, iemAImpl_vcvtsi2sd_u128_i32_fallback),
2573 puDst, puSrc1, pi32Src2);
2574 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
2575 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
2576 IEM_MC_ADVANCE_RIP_AND_FINISH();
2577 IEM_MC_END();
2578 }
2579 }
2580}
2581
2582
2583/**
2584 * @opcode 0x2b
2585 * @opcodesub !11 mr/reg
2586 * @oppfx none
2587 * @opcpuid avx
2588 * @opgroup og_avx_cachect
2589 * @opxcpttype 1
2590 * @optest op1=1 op2=2 -> op1=2
2591 * @optest op1=0 op2=-42 -> op1=-42
2592 * @note Identical implementation to vmovntpd
2593 */
2594FNIEMOP_DEF(iemOp_vmovntps_Mps_Vps)
2595{
2596 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPS, vmovntps, Mps_WO, Vps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2597 Assert(pVCpu->iem.s.uVexLength <= 1);
2598 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2599 if (IEM_IS_MODRM_MEM_MODE(bRm))
2600 {
2601 /*
2602 * memory, register.
2603 */
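        /* Note: the non-temporal hint is not modelled here; the store behaves
           like an ordinary aligned store. */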
2604 if (pVCpu->iem.s.uVexLength == 0)
2605 {
2606 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2607 IEM_MC_LOCAL(RTUINT128U, uSrc);
2608 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2609
2610 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2611 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2612 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2613 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2614
2615 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2616 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2617
2618 IEM_MC_ADVANCE_RIP_AND_FINISH();
2619 IEM_MC_END();
2620 }
2621 else
2622 {
2623 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2624 IEM_MC_LOCAL(RTUINT256U, uSrc);
2625 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2626
2627 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2628 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2629 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2630 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2631
2632 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2633 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2634
2635 IEM_MC_ADVANCE_RIP_AND_FINISH();
2636 IEM_MC_END();
2637 }
2638 }
2639 /* The register, register encoding is invalid. */
2640 else
2641 IEMOP_RAISE_INVALID_OPCODE_RET();
2642}
2643
2644/**
2645 * @opcode 0x2b
2646 * @opcodesub !11 mr/reg
2647 * @oppfx 0x66
2648 * @opcpuid avx
2649 * @opgroup og_avx_cachect
2650 * @opxcpttype 1
2651 * @optest op1=1 op2=2 -> op1=2
2652 * @optest op1=0 op2=-42 -> op1=-42
2653 * @note Identical implementation to vmovntps
2654 */
2655FNIEMOP_DEF(iemOp_vmovntpd_Mpd_Vpd)
2656{
2657 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTPD, vmovntpd, Mpd_WO, Vpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
2658 Assert(pVCpu->iem.s.uVexLength <= 1);
2659 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2660 if (IEM_IS_MODRM_MEM_MODE(bRm))
2661 {
2662 /*
2663 * memory, register.
2664 */
2665 if (pVCpu->iem.s.uVexLength == 0)
2666 {
2667 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2668 IEM_MC_LOCAL(RTUINT128U, uSrc);
2669 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2670
2671 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2672 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2673 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2674 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2675
2676 IEM_MC_FETCH_XREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2677 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2678
2679 IEM_MC_ADVANCE_RIP_AND_FINISH();
2680 IEM_MC_END();
2681 }
2682 else
2683 {
2684 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2685 IEM_MC_LOCAL(RTUINT256U, uSrc);
2686 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2687
2688 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2689 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
2690 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2691 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
2692
2693 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
2694 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
2695
2696 IEM_MC_ADVANCE_RIP_AND_FINISH();
2697 IEM_MC_END();
2698 }
2699 }
2700 /* The register, register encoding is invalid. */
2701 else
2702 IEMOP_RAISE_INVALID_OPCODE_RET();
2703}
2704
2705/**
2706 * @opmnemonic udvexf30f2b
2707 * @opcode 0x2b
2708 * @oppfx 0xf3
2709 * @opunused vex.modrm
2710 * @opcpuid avx
2711 * @optest ->
2712 * @opdone
2713 */
2714
2715/**
2716 * @opmnemonic udvexf20f2b
2717 * @opcode 0x2b
2718 * @oppfx 0xf2
2719 * @opunused vex.modrm
2720 * @opcpuid avx
2721 * @optest ->
2722 * @opdone
2723 */
2724
2725
2726/* Opcode VEX.0F 0x2c - invalid */
2727/* Opcode VEX.66.0F 0x2c - invalid */
2728
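/*
 * Common body for vcvtss2si/vcvttss2si: in 64-bit mode VEX.W selects a 64-bit
 * general register destination (otherwise W is ignored and the destination is
 * 32-bit); the source is the low single-precision value of an XMM register or
 * a 32-bit memory operand.
 */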
2729#define IEMOP_VCVTXSS2SI_Gy_Wss_BODY(a_Instr) \
2730 IEMOP_HLP_IGNORE_VEX_W_PREFIX_IF_NOT_IN_64BIT(); \
2731 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2732 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
2733 { \
2734 if (IEM_IS_MODRM_REG_MODE(bRm)) \
2735 { \
2736 /* greg64, XMM */ \
2737 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
2738 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
2739 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
2740 IEM_MC_PREPARE_AVX_USAGE(); \
2741 IEM_MC_LOCAL( int64_t, i64Dst); \
2742 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0); \
2743 IEM_MC_ARG( PCRTFLOAT32U, pr32Src, 1); \
2744 IEM_MC_REF_XREG_R32_CONST(pr32Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
2745 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
2746 RT_CONCAT3(iemAImpl_,a_Instr,_i64_r32), \
2747 RT_CONCAT3(iemAImpl_,a_Instr,_i64_r32_fallback)), \
2748 pi64Dst, pr32Src); \
2749 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst); \
2750 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2751 IEM_MC_END(); \
2752 } \
2753 else \
2754 { \
2755 /* greg64, [mem64] */ \
2756 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
2757 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2758 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2759 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
2760 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
2761 IEM_MC_PREPARE_AVX_USAGE(); \
2762 IEM_MC_LOCAL(RTFLOAT32U, r32Src); \
2763 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src, r32Src, 1); \
2764 IEM_MC_FETCH_MEM_R32(r32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2765 IEM_MC_LOCAL( int64_t, i64Dst); \
2766 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0); \
2767 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
2768 RT_CONCAT3(iemAImpl_,a_Instr,_i64_r32), \
2769 RT_CONCAT3(iemAImpl_,a_Instr,_i64_r32_fallback)), \
2770 pi64Dst, pr32Src); \
2771 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst); \
2772 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2773 IEM_MC_END(); \
2774 } \
2775 } \
2776 else \
2777 { \
2778 if (IEM_IS_MODRM_REG_MODE(bRm)) \
2779 { \
2780 /* greg, XMM */ \
2781 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
2782 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
2783 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
2784 IEM_MC_PREPARE_AVX_USAGE(); \
2785 IEM_MC_LOCAL( int32_t, i32Dst); \
2786 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0); \
2787 IEM_MC_ARG( PCRTFLOAT32U, pr32Src, 1); \
2788 IEM_MC_REF_XREG_R32_CONST(pr32Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
2789 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
2790 RT_CONCAT3(iemAImpl_,a_Instr,_i32_r32), \
2791 RT_CONCAT3(iemAImpl_,a_Instr,_i32_r32_fallback)), \
2792 pi32Dst, pr32Src); \
2793 IEM_MC_STORE_GREG_I32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst); \
2794 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2795 IEM_MC_END(); \
2796 } \
2797 else \
2798 { \
2799 /* greg, [mem] */ \
2800 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
2801 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2802 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2803 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
2804 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
2805 IEM_MC_PREPARE_AVX_USAGE(); \
2806 IEM_MC_LOCAL(RTFLOAT32U, r32Src); \
2807 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Src, r32Src, 1); \
2808 IEM_MC_FETCH_MEM_R32(r32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2809 IEM_MC_LOCAL( int32_t, i32Dst); \
2810 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0); \
2811 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
2812 RT_CONCAT3(iemAImpl_,a_Instr,_i32_r32), \
2813 RT_CONCAT3(iemAImpl_,a_Instr,_i32_r32_fallback)), \
2814 pi32Dst, pr32Src); \
2815 IEM_MC_STORE_GREG_I32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst); \
2816 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2817 IEM_MC_END(); \
2818 } \
2819 } \
2820 (void)0
2821
2822
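/*
 * Common body for vcvtsd2si/vcvttsd2si: same structure as the ss2si body
 * above, but with a double-precision (64-bit) floating point source operand.
 */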
2823#define IEMOP_VCVTXSD2SI_Gy_Wsd_BODY(a_Instr) \
2824 IEMOP_HLP_IGNORE_VEX_W_PREFIX_IF_NOT_IN_64BIT(); \
2825 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
2826 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
2827 { \
2828 if (IEM_IS_MODRM_REG_MODE(bRm)) \
2829 { \
2830 /* greg64, XMM */ \
2831 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
2832 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
2833 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
2834 IEM_MC_PREPARE_AVX_USAGE(); \
2835 IEM_MC_LOCAL( int64_t, i64Dst); \
2836 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0); \
2837 IEM_MC_ARG( PCRTFLOAT64U, pr64Src, 1); \
2838 IEM_MC_REF_XREG_R64_CONST(pr64Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
2839 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
2840 RT_CONCAT3(iemAImpl_,a_Instr,_i64_r64), \
2841 RT_CONCAT3(iemAImpl_,a_Instr,_i64_r64_fallback)), \
2842 pi64Dst, pr64Src); \
2843 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst); \
2844 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2845 IEM_MC_END(); \
2846 } \
2847 else \
2848 { \
2849 /* greg64, [mem64] */ \
2850 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
2851 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2852 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2853 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
2854 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
2855 IEM_MC_PREPARE_AVX_USAGE(); \
2856 IEM_MC_LOCAL(RTFLOAT64U, r64Src); \
2857 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src, r64Src, 1); \
2858 IEM_MC_FETCH_MEM_R64(r64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2859 IEM_MC_LOCAL( int64_t, i64Dst); \
2860 IEM_MC_ARG_LOCAL_REF(int64_t *, pi64Dst, i64Dst, 0); \
2861 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
2862 RT_CONCAT3(iemAImpl_,a_Instr,_i64_r64), \
2863 RT_CONCAT3(iemAImpl_,a_Instr,_i64_r64_fallback)), \
2864 pi64Dst, pr64Src); \
2865 IEM_MC_STORE_GREG_I64(IEM_GET_MODRM_REG(pVCpu, bRm), i64Dst); \
2866 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2867 IEM_MC_END(); \
2868 } \
2869 } \
2870 else \
2871 { \
2872 if (IEM_IS_MODRM_REG_MODE(bRm)) \
2873 { \
2874 /* greg, XMM */ \
2875 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
2876 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
2877 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
2878 IEM_MC_PREPARE_AVX_USAGE(); \
2879 IEM_MC_LOCAL( int32_t, i32Dst); \
2880 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0); \
2881 IEM_MC_ARG( PCRTFLOAT64U, pr64Src, 1); \
2882 IEM_MC_REF_XREG_R64_CONST(pr64Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
2883 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
2884 RT_CONCAT3(iemAImpl_,a_Instr,_i32_r64), \
2885 RT_CONCAT3(iemAImpl_,a_Instr,_i32_r64_fallback)), \
2886 pi32Dst, pr64Src); \
2887 IEM_MC_STORE_GREG_I32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst); \
2888 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2889 IEM_MC_END(); \
2890 } \
2891 else \
2892 { \
2893 /* greg, [mem] */ \
2894 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
2895 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
2896 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
2897 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
2898 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
2899 IEM_MC_PREPARE_AVX_USAGE(); \
2900 IEM_MC_LOCAL(RTFLOAT64U, r64Src); \
2901 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Src, r64Src, 1); \
2902 IEM_MC_FETCH_MEM_R64(r64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
2903 IEM_MC_LOCAL( int32_t, i32Dst); \
2904 IEM_MC_ARG_LOCAL_REF(int32_t *, pi32Dst, i32Dst, 0); \
2905 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
2906 RT_CONCAT3(iemAImpl_,a_Instr,_i32_r64), \
2907 RT_CONCAT3(iemAImpl_,a_Instr,_i32_r64_fallback)), \
2908 pi32Dst, pr64Src); \
2909 IEM_MC_STORE_GREG_I32(IEM_GET_MODRM_REG(pVCpu, bRm), i32Dst); \
2910 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
2911 IEM_MC_END(); \
2912 } \
2913 } \
2914 (void)0
2915
2916
2917/** Opcode VEX.F3.0F 0x2c - vcvttss2si Gy, Wss */
2918FNIEMOP_DEF(iemOp_vcvttss2si_Gy_Wss)
2919{
2920 IEMOP_MNEMONIC2(VEX_RM, VCVTTSS2SI, vcvttss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2921 IEMOP_VCVTXSS2SI_Gy_Wss_BODY( vcvttss2si);
2922}
2923
2924
2925/** Opcode VEX.F2.0F 0x2c - vcvttsd2si Gy, Wsd */
2926FNIEMOP_DEF(iemOp_vcvttsd2si_Gy_Wsd)
2927{
2928    IEMOP_MNEMONIC2(VEX_RM, VCVTTSD2SI, vcvttsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2929    IEMOP_VCVTXSD2SI_Gy_Wsd_BODY(       vcvttsd2si);
2930}
2931
2932
2933/* Opcode VEX.0F 0x2d - invalid */
2934/* Opcode VEX.66.0F 0x2d - invalid */
2935
2936
2937/** Opcode VEX.F3.0F 0x2d - vcvtss2si Gy, Wss */
2938FNIEMOP_DEF(iemOp_vcvtss2si_Gy_Wss)
2939{
2940 IEMOP_MNEMONIC2(VEX_RM, VCVTSS2SI, vcvtss2si, Gy, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2941 IEMOP_VCVTXSS2SI_Gy_Wss_BODY( vcvtss2si);
2942}
2943
2944
2945/** Opcode VEX.F2.0F 0x2d - vcvtsd2si Gy, Wsd */
2946FNIEMOP_DEF(iemOp_vcvtsd2si_Gy_Wsd)
2947{
2948    IEMOP_MNEMONIC2(VEX_RM, VCVTSD2SI, vcvtsd2si, Gy, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
2949    IEMOP_VCVTXSD2SI_Gy_Wsd_BODY(       vcvtsd2si);
2950}
2951
2952
2953
2954/**
2955 * @opcode 0x2e
2956 * @oppfx none
2957 * @opflmodify cf,pf,af,zf,sf,of
2958 * @opflclear af,sf,of
2959 */
2960FNIEMOP_DEF(iemOp_vucomiss_Vss_Wss)
2961{
2962 IEMOP_MNEMONIC2(VEX_RM, VUCOMISS, vucomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
2963 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2964 if (IEM_IS_MODRM_REG_MODE(bRm))
2965 {
2966 /*
2967 * Register, register.
2968 */
2969 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2970 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
2971 IEM_MC_LOCAL(uint32_t, fEFlags);
2972 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2973 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
2974 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
2975 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
2976 IEM_MC_PREPARE_AVX_USAGE();
2977 IEM_MC_FETCH_EFLAGS(fEFlags);
2978 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
2979 IEM_MC_FETCH_XREG_R32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDWord*/);
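        /* The worker sets ZF/PF/CF from the unordered compare and clears
           AF/SF/OF, matching the @opflmodify/@opflclear notes above. */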
2980 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomiss_u128, iemAImpl_vucomiss_u128_fallback),
2981 pEFlags, uSrc1, uSrc2);
2982 IEM_MC_COMMIT_EFLAGS(fEFlags);
2983
2984 IEM_MC_ADVANCE_RIP_AND_FINISH();
2985 IEM_MC_END();
2986 }
2987 else
2988 {
2989 /*
2990 * Register, memory.
2991 */
2992 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
2993 IEM_MC_LOCAL(uint32_t, fEFlags);
2994 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
2995 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
2996 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
2997 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2998
2999 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3000 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3001 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3002 IEM_MC_FETCH_MEM_R32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3003
3004 IEM_MC_PREPARE_AVX_USAGE();
3005 IEM_MC_FETCH_EFLAGS(fEFlags);
3006 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
3007 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomiss_u128, iemAImpl_vucomiss_u128_fallback),
3008 pEFlags, uSrc1, uSrc2);
3009 IEM_MC_COMMIT_EFLAGS(fEFlags);
3010
3011 IEM_MC_ADVANCE_RIP_AND_FINISH();
3012 IEM_MC_END();
3013 }
3014}
3015
3016
3017/**
3018 * @opcode 0x2e
3019 * @oppfx 0x66
3020 * @opflmodify cf,pf,af,zf,sf,of
3021 * @opflclear af,sf,of
3022 */
3023FNIEMOP_DEF(iemOp_vucomisd_Vsd_Wsd)
3024{
3025 IEMOP_MNEMONIC2(VEX_RM, VUCOMISD, vucomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
3026 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3027 if (IEM_IS_MODRM_REG_MODE(bRm))
3028 {
3029 /*
3030 * Register, register.
3031 */
3032 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3033 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3034 IEM_MC_LOCAL(uint32_t, fEFlags);
3035 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
3036 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
3037 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
3038 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3039 IEM_MC_PREPARE_AVX_USAGE();
3040 IEM_MC_FETCH_EFLAGS(fEFlags);
3041 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
3042 IEM_MC_FETCH_XREG_R64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iQWord*/);
3043 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomisd_u128, iemAImpl_vucomisd_u128_fallback),
3044 pEFlags, uSrc1, uSrc2);
3045 IEM_MC_COMMIT_EFLAGS(fEFlags);
3046
3047 IEM_MC_ADVANCE_RIP_AND_FINISH();
3048 IEM_MC_END();
3049 }
3050 else
3051 {
3052 /*
3053 * Register, memory.
3054 */
3055 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3056 IEM_MC_LOCAL(uint32_t, fEFlags);
3057 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
3058 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
3059 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
3060 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3061
3062 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3063 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3064 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3065 IEM_MC_FETCH_MEM_R64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3066
3067 IEM_MC_PREPARE_AVX_USAGE();
3068 IEM_MC_FETCH_EFLAGS(fEFlags);
3069 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
3070 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vucomisd_u128, iemAImpl_vucomisd_u128_fallback),
3071 pEFlags, uSrc1, uSrc2);
3072 IEM_MC_COMMIT_EFLAGS(fEFlags);
3073
3074 IEM_MC_ADVANCE_RIP_AND_FINISH();
3075 IEM_MC_END();
3076 }
3077}
3078
3079
3080/* Opcode VEX.F3.0F 0x2e - invalid */
3081/* Opcode VEX.F2.0F 0x2e - invalid */
3082
3083/**
3084 * @opcode 0x2f
3085 * @oppfx none
3086 * @opflmodify cf,pf,af,zf,sf,of
3087 * @opflclear af,sf,of
3088 */
3089FNIEMOP_DEF(iemOp_vcomiss_Vss_Wss)
3090{
3091 IEMOP_MNEMONIC2(VEX_RM, VCOMISS, vcomiss, Vss, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
3092 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3093 if (IEM_IS_MODRM_REG_MODE(bRm))
3094 {
3095 /*
3096 * Register, register.
3097 */
3098 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3099 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3100 IEM_MC_LOCAL(uint32_t, fEFlags);
3101 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
3102 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
3103 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
3104 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3105 IEM_MC_PREPARE_AVX_USAGE();
3106 IEM_MC_FETCH_EFLAGS(fEFlags);
3107 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
3108 IEM_MC_FETCH_XREG_R32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDWord*/);
3109 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomiss_u128, iemAImpl_vcomiss_u128_fallback),
3110 pEFlags, uSrc1, uSrc2);
3111 IEM_MC_COMMIT_EFLAGS(fEFlags);
3112
3113 IEM_MC_ADVANCE_RIP_AND_FINISH();
3114 IEM_MC_END();
3115 }
3116 else
3117 {
3118 /*
3119 * Register, memory.
3120 */
3121 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3122 IEM_MC_LOCAL(uint32_t, fEFlags);
3123 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
3124 IEM_MC_ARG(RTFLOAT32U, uSrc1, 1);
3125 IEM_MC_ARG(RTFLOAT32U, uSrc2, 2);
3126 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3127
3128 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3129 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3130 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3131 IEM_MC_FETCH_MEM_R32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3132
3133 IEM_MC_PREPARE_AVX_USAGE();
3134 IEM_MC_FETCH_EFLAGS(fEFlags);
3135 IEM_MC_FETCH_XREG_R32(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDWord*/);
3136 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomiss_u128, iemAImpl_vcomiss_u128_fallback),
3137 pEFlags, uSrc1, uSrc2);
3138 IEM_MC_COMMIT_EFLAGS(fEFlags);
3139
3140 IEM_MC_ADVANCE_RIP_AND_FINISH();
3141 IEM_MC_END();
3142 }
3143}
3144
3145
3146/**
3147 * @opcode 0x2f
3148 * @oppfx 0x66
3149 * @opflmodify cf,pf,af,zf,sf,of
3150 * @opflclear af,sf,of
3151 */
3152FNIEMOP_DEF(iemOp_vcomisd_Vsd_Wsd)
3153{
3154 IEMOP_MNEMONIC2(VEX_RM, VCOMISD, vcomisd, Vsd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
3155 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3156 if (IEM_IS_MODRM_REG_MODE(bRm))
3157 {
3158 /*
3159 * Register, register.
3160 */
3161 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3162 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3163 IEM_MC_LOCAL(uint32_t, fEFlags);
3164 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
3165 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
3166 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
3167 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3168 IEM_MC_PREPARE_AVX_USAGE();
3169 IEM_MC_FETCH_EFLAGS(fEFlags);
3170 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
3171 IEM_MC_FETCH_XREG_R64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iQWord*/);
3172 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomisd_u128, iemAImpl_vcomisd_u128_fallback),
3173 pEFlags, uSrc1, uSrc2);
3174 IEM_MC_COMMIT_EFLAGS(fEFlags);
3175
3176 IEM_MC_ADVANCE_RIP_AND_FINISH();
3177 IEM_MC_END();
3178 }
3179 else
3180 {
3181 /*
3182 * Register, memory.
3183 */
3184 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3185 IEM_MC_LOCAL(uint32_t, fEFlags);
3186 IEM_MC_ARG_LOCAL_REF(uint32_t *, pEFlags, fEFlags, 0);
3187 IEM_MC_ARG(RTFLOAT64U, uSrc1, 1);
3188 IEM_MC_ARG(RTFLOAT64U, uSrc2, 2);
3189 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3190
3191 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3192 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
3193 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3194 IEM_MC_FETCH_MEM_R64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3195
3196 IEM_MC_PREPARE_AVX_USAGE();
3197 IEM_MC_FETCH_EFLAGS(fEFlags);
3198 IEM_MC_FETCH_XREG_R64(uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
3199 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcomisd_u128, iemAImpl_vcomisd_u128_fallback),
3200 pEFlags, uSrc1, uSrc2);
3201 IEM_MC_COMMIT_EFLAGS(fEFlags);
3202
3203 IEM_MC_ADVANCE_RIP_AND_FINISH();
3204 IEM_MC_END();
3205 }
3206}
3207
3208
3209/* Opcode VEX.F3.0F 0x2f - invalid */
3210/* Opcode VEX.F2.0F 0x2f - invalid */
3211
3212/* Opcode VEX.0F 0x30 - invalid */
3213/* Opcode VEX.0F 0x31 - invalid */
3214/* Opcode VEX.0F 0x32 - invalid */
3215/* Opcode VEX.0F 0x33 - invalid */
3216/* Opcode VEX.0F 0x34 - invalid */
3217/* Opcode VEX.0F 0x35 - invalid */
3218/* Opcode VEX.0F 0x36 - invalid */
3219/* Opcode VEX.0F 0x37 - invalid */
3220/* Opcode VEX.0F 0x38 - invalid */
3221/* Opcode VEX.0F 0x39 - invalid */
3222/* Opcode VEX.0F 0x3a - invalid */
3223/* Opcode VEX.0F 0x3b - invalid */
3224/* Opcode VEX.0F 0x3c - invalid */
3225/* Opcode VEX.0F 0x3d - invalid */
3226/* Opcode VEX.0F 0x3e - invalid */
3227/* Opcode VEX.0F 0x3f - invalid */
3228/* Opcode VEX.0F 0x40 - invalid */
3229/* Opcode VEX.0F 0x41 - invalid */
3230/* Opcode VEX.0F 0x42 - invalid */
3231/* Opcode VEX.0F 0x43 - invalid */
3232/* Opcode VEX.0F 0x44 - invalid */
3233/* Opcode VEX.0F 0x45 - invalid */
3234/* Opcode VEX.0F 0x46 - invalid */
3235/* Opcode VEX.0F 0x47 - invalid */
3236/* Opcode VEX.0F 0x48 - invalid */
3237/* Opcode VEX.0F 0x49 - invalid */
3238/* Opcode VEX.0F 0x4a - invalid */
3239/* Opcode VEX.0F 0x4b - invalid */
3240/* Opcode VEX.0F 0x4c - invalid */
3241/* Opcode VEX.0F 0x4d - invalid */
3242/* Opcode VEX.0F 0x4e - invalid */
3243/* Opcode VEX.0F 0x4f - invalid */
3244
3245
3246/** Opcode VEX.0F 0x50 - vmovmskps Gy, Ups */
3247FNIEMOP_DEF(iemOp_vmovmskps_Gy_Ups)
3248{
3249 IEMOP_MNEMONIC2(VEX_RM_REG, VMOVMSKPS, vmovmskps, Gd, Ux, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
3250 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3251 if (IEM_IS_MODRM_REG_MODE(bRm))
3252 {
3253 /*
3254 * Register, register.
3255 */
3256 if (pVCpu->iem.s.uVexLength == 0)
3257 {
3258 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3259 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
3260 IEM_MC_LOCAL(uint8_t, u8Dst);
3261 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
3262 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
3263 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3264 IEM_MC_PREPARE_AVX_USAGE();
3265 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
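            /* Gather the sign bit of each packed single into the low bits of
               the destination general register (stored zero-extended). */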
3266 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmovmskps_u128, iemAImpl_vmovmskps_u128_fallback),
3267 pu8Dst, puSrc);
3268 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
3269 IEM_MC_ADVANCE_RIP_AND_FINISH();
3270 IEM_MC_END();
3271 }
3272 else
3273 {
3274 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3275 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
3276 IEM_MC_LOCAL(uint8_t, u8Dst);
3277 IEM_MC_LOCAL(RTUINT256U, uSrc);
3278 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
3279 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3280
3281 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3282 IEM_MC_PREPARE_AVX_USAGE();
3283 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3284 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vmovmskps_u256, iemAImpl_vmovmskps_u256_fallback),
3285 pu8Dst, puSrc);
3286 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
3287 IEM_MC_ADVANCE_RIP_AND_FINISH();
3288 IEM_MC_END();
3289 }
3290 }
3291 /* No memory operand. */
3292 else
3293 IEMOP_RAISE_INVALID_OPCODE_RET();
3294}
3295
3296
3297/** Opcode VEX.66.0F 0x50 - vmovmskpd Gy,Upd */
3298FNIEMOP_DEF(iemOp_vmovmskpd_Gy_Upd)
3299{
3300 IEMOP_MNEMONIC2(VEX_RM_REG, VMOVMSKPD, vmovmskpd, Gd, Ux, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
3301 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3302 if (IEM_IS_MODRM_REG_MODE(bRm))
3303 {
3304 /*
3305 * Register, register.
3306 */
3307 if (pVCpu->iem.s.uVexLength == 0)
3308 {
3309 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3310 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
3311 IEM_MC_LOCAL(uint8_t, u8Dst);
3312 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
3313 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
3314 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3315 IEM_MC_PREPARE_AVX_USAGE();
3316 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3317 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmovmskpd_u128, iemAImpl_vmovmskpd_u128_fallback),
3318 pu8Dst, puSrc);
3319 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
3320 IEM_MC_ADVANCE_RIP_AND_FINISH();
3321 IEM_MC_END();
3322 }
3323 else
3324 {
3325 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3326 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
3327 IEM_MC_LOCAL(uint8_t, u8Dst);
3328 IEM_MC_LOCAL(RTUINT256U, uSrc);
3329 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Dst, u8Dst, 0);
3330 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
3331
3332 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3333 IEM_MC_PREPARE_AVX_USAGE();
3334 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3335 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vmovmskpd_u256, iemAImpl_vmovmskpd_u256_fallback),
3336 pu8Dst, puSrc);
3337 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u8Dst);
3338 IEM_MC_ADVANCE_RIP_AND_FINISH();
3339 IEM_MC_END();
3340 }
3341 }
3342 /* No memory operand. */
3343 else
3344 IEMOP_RAISE_INVALID_OPCODE_RET();
3345}
3346
3347
3348/* Opcode VEX.F3.0F 0x50 - invalid */
3349/* Opcode VEX.F2.0F 0x50 - invalid */
3350
3351/** Opcode VEX.0F 0x51 - vsqrtps Vps, Wps */
3352FNIEMOP_DEF(iemOp_vsqrtps_Vps_Wps)
3353{
3354 IEMOP_MNEMONIC2(VEX_RM, VSQRTPS, vsqrtps, Vps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
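    /* IEMOPMEDIAF2_INIT_VARS declares the local s_Host/s_Fallback implementation
       tables referenced on the next line, pairing the native and C-fallback
       128-/256-bit workers; the common worker then dispatches on VEX.L. */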
3355 IEMOPMEDIAF2_INIT_VARS( vsqrtps);
3356 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3357}
3358
3359
3360/** Opcode VEX.66.0F 0x51 - vsqrtpd Vpd, Wpd */
3361FNIEMOP_DEF(iemOp_vsqrtpd_Vpd_Wpd)
3362{
3363 IEMOP_MNEMONIC2(VEX_RM, VSQRTPD, vsqrtpd, Vpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3364 IEMOPMEDIAF2_INIT_VARS( vsqrtpd);
3365 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3366}
3367
3368
3369/** Opcode VEX.F3.0F 0x51 - vsqrtss Vss, Hss, Wss */
3370FNIEMOP_DEF(iemOp_vsqrtss_Vss_Hss_Wss)
3371{
3372 IEMOP_MNEMONIC3(VEX_RVM, VSQRTSS, vsqrtss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3373 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3374 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vsqrtss_u128_r32, iemAImpl_vsqrtss_u128_r32_fallback));
3375}
3376
3377
3378/** Opcode VEX.F2.0F 0x51 - vsqrtsd Vsd, Hsd, Wsd */
3379FNIEMOP_DEF(iemOp_vsqrtsd_Vsd_Hsd_Wsd)
3380{
    IEMOP_MNEMONIC3(VEX_RVM, VSQRTSD, vsqrtsd, Vpd, Hpd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3382 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
3383 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vsqrtsd_u128_r64, iemAImpl_vsqrtsd_u128_r64_fallback));
3384}
3385
3386
3387/** Opcode VEX.0F 0x52 - vrsqrtps Vps, Wps */
3388FNIEMOP_DEF(iemOp_vrsqrtps_Vps_Wps)
3389{
3390 IEMOP_MNEMONIC2(VEX_RM, VRSQRTPS, vrsqrtps, Vps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3391 IEMOPMEDIAF2_INIT_VARS( vrsqrtps);
3392 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3393}
3394
3395
3396/* Opcode VEX.66.0F 0x52 - invalid */
3397
3398
3399/** Opcode VEX.F3.0F 0x52 - vrsqrtss Vss, Hss, Wss */
3400FNIEMOP_DEF(iemOp_vrsqrtss_Vss_Hss_Wss)
3401{
3402 IEMOP_MNEMONIC3(VEX_RVM, VRSQRTSS, vrsqrtss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3403 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3404 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vrsqrtss_u128_r32, iemAImpl_vrsqrtss_u128_r32_fallback));
3405}
3406
3407
3408/* Opcode VEX.F2.0F 0x52 - invalid */
3409
3410
3411/** Opcode VEX.0F 0x53 - vrcpps Vps, Wps */
3412FNIEMOP_DEF(iemOp_vrcpps_Vps_Wps)
3413{
3414 IEMOP_MNEMONIC2(VEX_RM, VRCPPS, vrcpps, Vps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3415 IEMOPMEDIAF2_INIT_VARS( vrcpps);
3416 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3417}
3418
3419
3420/* Opcode VEX.66.0F 0x53 - invalid */
3421
3422
3423/** Opcode VEX.F3.0F 0x53 - vrcpss Vss, Hss, Wss */
3424FNIEMOP_DEF(iemOp_vrcpss_Vss_Hss_Wss)
3425{
3426 IEMOP_MNEMONIC3(VEX_RVM, VRCPSS, vrcpss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3427 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3428 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vrcpss_u128_r32, iemAImpl_vrcpss_u128_r32_fallback));
3429}
3430
3431
3432/* Opcode VEX.F2.0F 0x53 - invalid */
3433
3434
3435/** Opcode VEX.0F 0x54 - vandps Vps, Hps, Wps */
3436FNIEMOP_DEF(iemOp_vandps_Vps_Hps_Wps)
3437{
3438 IEMOP_MNEMONIC3(VEX_RVM, VANDPS, vandps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
3439 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3440 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
3441}
3442
3443
3444/** Opcode VEX.66.0F 0x54 - vandpd Vpd, Hpd, Wpd */
3445FNIEMOP_DEF(iemOp_vandpd_Vpd_Hpd_Wpd)
3446{
3447 IEMOP_MNEMONIC3(VEX_RVM, VANDPD, vandpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
3448 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3449 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
3450}
3451
3452
3453/* Opcode VEX.F3.0F 0x54 - invalid */
3454/* Opcode VEX.F2.0F 0x54 - invalid */
3455
3456
3457/** Opcode VEX.0F 0x55 - vandnps Vps, Hps, Wps */
3458FNIEMOP_DEF(iemOp_vandnps_Vps_Hps_Wps)
3459{
3460 IEMOP_MNEMONIC3(VEX_RVM, VANDNPS, vandnps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
3461 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3462 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
3463}
3464
3465
3466/** Opcode VEX.66.0F 0x55 - vandnpd Vpd, Hpd, Wpd */
3467FNIEMOP_DEF(iemOp_vandnpd_Vpd_Hpd_Wpd)
3468{
3469 IEMOP_MNEMONIC3(VEX_RVM, VANDNPD, vandnpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
3470 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3471 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
3472}
3473
3474
3475/* Opcode VEX.F3.0F 0x55 - invalid */
3476/* Opcode VEX.F2.0F 0x55 - invalid */
3477
3478/** Opcode VEX.0F 0x56 - vorps Vps, Hps, Wps */
3479FNIEMOP_DEF(iemOp_vorps_Vps_Hps_Wps)
3480{
3481 IEMOP_MNEMONIC3(VEX_RVM, VORPS, vorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
3482 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3483 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
3484}
3485
3486
3487/** Opcode VEX.66.0F 0x56 - vorpd Vpd, Hpd, Wpd */
3488FNIEMOP_DEF(iemOp_vorpd_Vpd_Hpd_Wpd)
3489{
3490 IEMOP_MNEMONIC3(VEX_RVM, VORPD, vorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
3491 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3492 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
3493}
3494
3495
3496/* Opcode VEX.F3.0F 0x56 - invalid */
3497/* Opcode VEX.F2.0F 0x56 - invalid */
3498
3499
3500/** Opcode VEX.0F 0x57 - vxorps Vps, Hps, Wps */
3501FNIEMOP_DEF(iemOp_vxorps_Vps_Hps_Wps)
3502{
3503 IEMOP_MNEMONIC3(VEX_RVM, VXORPS, vxorps, Vps, Hps, Wps, DISOPTYPE_HARMLESS, 0);
3504 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3505 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
3506}
3507
3508
3509/** Opcode VEX.66.0F 0x57 - vxorpd Vpd, Hpd, Wpd */
3510FNIEMOP_DEF(iemOp_vxorpd_Vpd_Hpd_Wpd)
3511{
3512 IEMOP_MNEMONIC3(VEX_RVM, VXORPD, vxorpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS, 0);
3513 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
3514 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
3515}
3516
3517
3518/* Opcode VEX.F3.0F 0x57 - invalid */
3519/* Opcode VEX.F2.0F 0x57 - invalid */
3520
3521
3522/** Opcode VEX.0F 0x58 - vaddps Vps, Hps, Wps */
3523FNIEMOP_DEF(iemOp_vaddps_Vps_Hps_Wps)
3524{
3525 IEMOP_MNEMONIC3(VEX_RVM, VADDPS, vaddps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3526 IEMOPMEDIAF3_INIT_VARS( vaddps);
3527 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3528}
3529
3530
3531/** Opcode VEX.66.0F 0x58 - vaddpd Vpd, Hpd, Wpd */
3532FNIEMOP_DEF(iemOp_vaddpd_Vpd_Hpd_Wpd)
3533{
3534 IEMOP_MNEMONIC3(VEX_RVM, VADDPD, vaddpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3535 IEMOPMEDIAF3_INIT_VARS( vaddpd);
3536 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3537}
3538
3539
3540/** Opcode VEX.F3.0F 0x58 - vaddss Vss, Hss, Wss */
3541FNIEMOP_DEF(iemOp_vaddss_Vss_Hss_Wss)
3542{
3543 IEMOP_MNEMONIC3(VEX_RVM, VADDSS, vaddss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3544 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3545 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vaddss_u128_r32, iemAImpl_vaddss_u128_r32_fallback));
3546}
3547
3548
3549/** Opcode VEX.F2.0F 0x58 - vaddsd Vsd, Hsd, Wsd */
3550FNIEMOP_DEF(iemOp_vaddsd_Vsd_Hsd_Wsd)
3551{
3552 IEMOP_MNEMONIC3(VEX_RVM, VADDSD, vaddsd, Vpd, Hpd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3553 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
3554 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vaddsd_u128_r64, iemAImpl_vaddsd_u128_r64_fallback));
3555}
3556
3557
3558/** Opcode VEX.0F 0x59 - vmulps Vps, Hps, Wps */
3559FNIEMOP_DEF(iemOp_vmulps_Vps_Hps_Wps)
3560{
3561 IEMOP_MNEMONIC3(VEX_RVM, VMULPS, vmulps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3562 IEMOPMEDIAF3_INIT_VARS( vmulps);
3563 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3564}
3565
3566
3567/** Opcode VEX.66.0F 0x59 - vmulpd Vpd, Hpd, Wpd */
3568FNIEMOP_DEF(iemOp_vmulpd_Vpd_Hpd_Wpd)
3569{
3570 IEMOP_MNEMONIC3(VEX_RVM, VMULPD, vmulpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3571 IEMOPMEDIAF3_INIT_VARS( vmulpd);
3572 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3573}
3574
3575
3576/** Opcode VEX.F3.0F 0x59 - vmulss Vss, Hss, Wss */
3577FNIEMOP_DEF(iemOp_vmulss_Vss_Hss_Wss)
3578{
3579 IEMOP_MNEMONIC3(VEX_RVM, VMULSS, vmulss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3580 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3581 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmulss_u128_r32, iemAImpl_vmulss_u128_r32_fallback));
3582}
3583
3584
3585/** Opcode VEX.F2.0F 0x59 - vmulsd Vsd, Hsd, Wsd */
3586FNIEMOP_DEF(iemOp_vmulsd_Vsd_Hsd_Wsd)
3587{
3588 IEMOP_MNEMONIC3(VEX_RVM, VMULSD, vmulsd, Vpd, Hpd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3589 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
3590 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmulsd_u128_r64, iemAImpl_vmulsd_u128_r64_fallback));
3591}
3592
3593
3594/** Opcode VEX.0F 0x5a - vcvtps2pd Vpd, Wps */
3595FNIEMOP_DEF(iemOp_vcvtps2pd_Vpd_Wps)
3596{
3597 IEMOP_MNEMONIC2(VEX_RM, VCVTPS2PD, vcvtps2pd, Vpd, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
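    /* Widening conversion: the source is half the width of the result, so the
       128-bit form only reads 64 bits (two singles) and the 256-bit form reads
       128 bits (four singles), as the fetches below reflect. */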
3598 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3599 if (IEM_IS_MODRM_REG_MODE(bRm))
3600 {
3601 /*
3602 * Register, register.
3603 */
3604 if (pVCpu->iem.s.uVexLength)
3605 {
3606 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3607 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3608 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3609 IEM_MC_PREPARE_AVX_USAGE();
3610
3611 IEM_MC_ARG( PCX86XMMREG, puSrc, 1);
3612 IEM_MC_REF_XREG_XMM_CONST( puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3613 IEM_MC_LOCAL( X86YMMREG, uDst);
3614 IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
3615 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
3616 iemAImpl_vcvtps2pd_u256_u128,
3617 iemAImpl_vcvtps2pd_u256_u128_fallback),
3618 puDst, puSrc);
3619 IEM_MC_STORE_YREG_YMM_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3620 IEM_MC_ADVANCE_RIP_AND_FINISH();
3621 IEM_MC_END();
3622 }
3623 else
3624 {
3625 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3626 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3627 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3628 IEM_MC_PREPARE_AVX_USAGE();
3629
3630 IEM_MC_ARG( const uint64_t *, pu64Src, 1);
3631 IEM_MC_REF_XREG_U64_CONST( pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
3632 IEM_MC_LOCAL( X86XMMREG, uDst);
3633 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
3634 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
3635 iemAImpl_vcvtps2pd_u128_u64,
3636 iemAImpl_vcvtps2pd_u128_u64_fallback),
3637 puDst, pu64Src);
3638 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3639 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
3640 IEM_MC_ADVANCE_RIP_AND_FINISH();
3641 IEM_MC_END();
3642 }
3643 }
3644 else
3645 {
3646 /*
3647 * Register, memory.
3648 */
3649 if (pVCpu->iem.s.uVexLength)
3650 {
3651 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3652 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3653 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3654 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3655 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3656 IEM_MC_PREPARE_AVX_USAGE();
3657
3658 IEM_MC_LOCAL(X86XMMREG, uSrc);
3659 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc, uSrc, 1);
3660 IEM_MC_FETCH_MEM_XMM_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3661 IEM_MC_LOCAL(X86YMMREG, uDst);
3662 IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
3663 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
3664 iemAImpl_vcvtps2pd_u256_u128,
3665 iemAImpl_vcvtps2pd_u256_u128_fallback),
3666 puDst, puSrc);
3667 IEM_MC_STORE_YREG_YMM_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3668 IEM_MC_ADVANCE_RIP_AND_FINISH();
3669 IEM_MC_END();
3670 }
3671 else
3672 {
3673 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3674 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3675 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3676 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3677 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3678 IEM_MC_PREPARE_AVX_USAGE();
3679
3680 IEM_MC_LOCAL( uint64_t, u64Src);
3681 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 1);
3682 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3683 IEM_MC_LOCAL( X86XMMREG, uDst);
3684 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
3685 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
3686 iemAImpl_vcvtps2pd_u128_u64,
3687 iemAImpl_vcvtps2pd_u128_u64_fallback),
3688 puDst, pu64Src);
3689 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3690 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
3691 IEM_MC_ADVANCE_RIP_AND_FINISH();
3692 IEM_MC_END();
3693 }
3694 }
3695}
3696
3697
3698/** Opcode VEX.66.0F 0x5a - vcvtpd2ps Vps, Wpd */
3699FNIEMOP_DEF(iemOp_vcvtpd2ps_Vps_Wpd)
3700{
3701 IEMOP_MNEMONIC2(VEX_RM, VCVTPD2PS, vcvtpd2ps, Vps, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
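    /* Narrowing conversion: even the 256-bit source form produces only a 128-bit
       result, so both paths store an XMM value and clear the destination register
       above bit 127. */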
3702 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3703 if (IEM_IS_MODRM_REG_MODE(bRm))
3704 {
3705 /*
3706 * Register, register.
3707 */
3708 if (pVCpu->iem.s.uVexLength)
3709 {
3710 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3711 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3712 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3713 IEM_MC_PREPARE_AVX_USAGE();
3714
3715 IEM_MC_LOCAL( X86YMMREG, uSrc);
3716 IEM_MC_FETCH_YREG_YMM(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3717 IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc, uSrc, 1);
3718 IEM_MC_LOCAL( X86XMMREG, uDst);
3719 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
3720 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
3721 iemAImpl_vcvtpd2ps_u128_u256,
3722 iemAImpl_vcvtpd2ps_u128_u256_fallback),
3723 puDst, puSrc);
3724 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3725 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
3726 IEM_MC_ADVANCE_RIP_AND_FINISH();
3727 IEM_MC_END();
3728 }
3729 else
3730 {
3731 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3732 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3733 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3734 IEM_MC_PREPARE_AVX_USAGE();
3735
3736 IEM_MC_ARG( PCX86XMMREG, puSrc, 1);
3737 IEM_MC_REF_XREG_XMM_CONST( puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
3738 IEM_MC_LOCAL( X86XMMREG, uDst);
3739 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
3740 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
3741 iemAImpl_vcvtpd2ps_u128_u128,
3742 iemAImpl_vcvtpd2ps_u128_u128_fallback),
3743 puDst, puSrc);
3744 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3745 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
3746 IEM_MC_ADVANCE_RIP_AND_FINISH();
3747 IEM_MC_END();
3748 }
3749 }
3750 else
3751 {
3752 /*
3753 * Register, memory.
3754 */
3755 if (pVCpu->iem.s.uVexLength)
3756 {
3757 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3758 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3759 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3760 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3761 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3762 IEM_MC_PREPARE_AVX_USAGE();
3763
3764 IEM_MC_LOCAL( X86YMMREG, uSrc);
3765 IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc, uSrc, 1);
3766 IEM_MC_FETCH_MEM_YMM_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3767 IEM_MC_LOCAL( X86XMMREG, uDst);
3768 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
3769 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
3770 iemAImpl_vcvtpd2ps_u128_u256,
3771 iemAImpl_vcvtpd2ps_u128_u256_fallback),
3772 puDst, puSrc);
3773 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3774 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
3775 IEM_MC_ADVANCE_RIP_AND_FINISH();
3776 IEM_MC_END();
3777 }
3778 else
3779 {
3780 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
3781 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3782 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3783 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
3784 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
3785 IEM_MC_PREPARE_AVX_USAGE();
3786
3787 IEM_MC_LOCAL(X86XMMREG, uSrc);
3788 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc, uSrc, 1);
3789 IEM_MC_FETCH_MEM_XMM_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
3790 IEM_MC_LOCAL( X86XMMREG, uDst);
3791 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
3792 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
3793 iemAImpl_vcvtpd2ps_u128_u128,
3794 iemAImpl_vcvtpd2ps_u128_u128_fallback),
3795 puDst, puSrc);
3796 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
3797 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
3798 IEM_MC_ADVANCE_RIP_AND_FINISH();
3799 IEM_MC_END();
3800 }
3801 }
3802}
3803
3804
3805/** Opcode VEX.F3.0F 0x5a - vcvtss2sd Vsd, Hx, Wss */
3806FNIEMOP_DEF(iemOp_vcvtss2sd_Vsd_Hx_Wss)
3807{
3808 IEMOP_MNEMONIC3(VEX_RVM, VCVTSS2SD, vcvtss2sd, Vsd, Hx, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3809 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3810 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtss2sd_u128_r32, iemAImpl_vcvtss2sd_u128_r32_fallback));
3811}
3812
3813
3814/** Opcode VEX.F2.0F 0x5a - vcvtsd2ss Vss, Hx, Wsd */
3815FNIEMOP_DEF(iemOp_vcvtsd2ss_Vss_Hx_Wsd)
3816{
3817 IEMOP_MNEMONIC3(VEX_RVM, VCVTSD2SS, vcvtsd2ss, Vss, Hx, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3818 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
3819 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcvtsd2ss_u128_r64, iemAImpl_vcvtsd2ss_u128_r64_fallback));
3820}
3821
3822
3823/** Opcode VEX.0F 0x5b - vcvtdq2ps Vps, Wdq */
3824FNIEMOP_DEF(iemOp_vcvtdq2ps_Vps_Wdq)
3825{
3826 IEMOP_MNEMONIC2(VEX_RM, VCVTDQ2PS, vcvtdq2ps, Vps, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3827 IEMOPMEDIAF2_INIT_VARS( vcvtdq2ps);
3828 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3829}
3830
3831
3832/** Opcode VEX.66.0F 0x5b - vcvtps2dq Vdq, Wps */
3833FNIEMOP_DEF(iemOp_vcvtps2dq_Vdq_Wps)
3834{
3835 IEMOP_MNEMONIC2(VEX_RM, VCVTPS2DQ, vcvtps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3836 IEMOPMEDIAF2_INIT_VARS( vcvtps2dq);
3837 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3838}
3839
3840
3841/** Opcode VEX.F3.0F 0x5b - vcvttps2dq Vdq, Wps */
3842FNIEMOP_DEF(iemOp_vcvttps2dq_Vdq_Wps)
3843{
3844 IEMOP_MNEMONIC2(VEX_RM, VCVTTPS2DQ, vcvttps2dq, Vdq, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3845 IEMOPMEDIAF2_INIT_VARS( vcvttps2dq);
3846 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3847}
3848
3849
3850/* Opcode VEX.F2.0F 0x5b - invalid */
3851
3852
3853/** Opcode VEX.0F 0x5c - vsubps Vps, Hps, Wps */
3854FNIEMOP_DEF(iemOp_vsubps_Vps_Hps_Wps)
3855{
3856 IEMOP_MNEMONIC3(VEX_RVM, VSUBPS, vsubps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3857 IEMOPMEDIAF3_INIT_VARS( vsubps);
3858 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3859}
3860
3861
3862/** Opcode VEX.66.0F 0x5c - vsubpd Vpd, Hpd, Wpd */
3863FNIEMOP_DEF(iemOp_vsubpd_Vpd_Hpd_Wpd)
3864{
3865 IEMOP_MNEMONIC3(VEX_RVM, VSUBPD, vsubpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3866 IEMOPMEDIAF3_INIT_VARS( vsubpd);
3867 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3868}
3869
3870
3871/** Opcode VEX.F3.0F 0x5c - vsubss Vss, Hss, Wss */
3872FNIEMOP_DEF(iemOp_vsubss_Vss_Hss_Wss)
3873{
3874 IEMOP_MNEMONIC3(VEX_RVM, VSUBSS, vsubss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3875 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3876 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vsubss_u128_r32, iemAImpl_vsubss_u128_r32_fallback));
3877}
3878
3879
3880/** Opcode VEX.F2.0F 0x5c - vsubsd Vsd, Hsd, Wsd */
3881FNIEMOP_DEF(iemOp_vsubsd_Vsd_Hsd_Wsd)
3882{
3883 IEMOP_MNEMONIC3(VEX_RVM, VSUBSD, vsubsd, Vpd, Hpd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3884 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
3885 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vsubsd_u128_r64, iemAImpl_vsubsd_u128_r64_fallback));
3886}
3887
3888
3889/** Opcode VEX.0F 0x5d - vminps Vps, Hps, Wps */
3890FNIEMOP_DEF(iemOp_vminps_Vps_Hps_Wps)
3891{
3892 IEMOP_MNEMONIC3(VEX_RVM, VMINPS, vminps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3893 IEMOPMEDIAF3_INIT_VARS( vminps);
3894 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3895}
3896
3897
3898/** Opcode VEX.66.0F 0x5d - vminpd Vpd, Hpd, Wpd */
3899FNIEMOP_DEF(iemOp_vminpd_Vpd_Hpd_Wpd)
3900{
3901 IEMOP_MNEMONIC3(VEX_RVM, VMINPD, vminpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3902 IEMOPMEDIAF3_INIT_VARS( vminpd);
3903 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3904}
3905
3906
3907/** Opcode VEX.F3.0F 0x5d - vminss Vss, Hss, Wss */
3908FNIEMOP_DEF(iemOp_vminss_Vss_Hss_Wss)
3909{
3910 IEMOP_MNEMONIC3(VEX_RVM, VMINSS, vminss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3911 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3912 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vminss_u128_r32, iemAImpl_vminss_u128_r32_fallback));
3913}
3914
3915
3916/** Opcode VEX.F2.0F 0x5d - vminsd Vsd, Hsd, Wsd */
3917FNIEMOP_DEF(iemOp_vminsd_Vsd_Hsd_Wsd)
3918{
3919 IEMOP_MNEMONIC3(VEX_RVM, VMINSD, vminsd, Vpd, Hpd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3920 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
3921 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vminsd_u128_r64, iemAImpl_vminsd_u128_r64_fallback));
3922}
3923
3924
3925/** Opcode VEX.0F 0x5e - vdivps Vps, Hps, Wps */
3926FNIEMOP_DEF(iemOp_vdivps_Vps_Hps_Wps)
3927{
3928 IEMOP_MNEMONIC3(VEX_RVM, VDIVPS, vdivps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3929 IEMOPMEDIAF3_INIT_VARS( vdivps);
3930 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3931}
3932
3933
3934/** Opcode VEX.66.0F 0x5e - vdivpd Vpd, Hpd, Wpd */
3935FNIEMOP_DEF(iemOp_vdivpd_Vpd_Hpd_Wpd)
3936{
3937 IEMOP_MNEMONIC3(VEX_RVM, VDIVPD, vdivpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3938 IEMOPMEDIAF3_INIT_VARS( vdivpd);
3939 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3940}
3941
3942
3943/** Opcode VEX.F3.0F 0x5e - vdivss Vss, Hss, Wss */
3944FNIEMOP_DEF(iemOp_vdivss_Vss_Hss_Wss)
3945{
3946 IEMOP_MNEMONIC3(VEX_RVM, VDIVSS, vdivss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3947 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3948 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vdivss_u128_r32, iemAImpl_vdivss_u128_r32_fallback));
3949}
3950
3951
3952/** Opcode VEX.F2.0F 0x5e - vdivsd Vsd, Hsd, Wsd */
3953FNIEMOP_DEF(iemOp_vdivsd_Vsd_Hsd_Wsd)
3954{
3955 IEMOP_MNEMONIC3(VEX_RVM, VDIVSD, vdivsd, Vpd, Hpd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3956 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
3957 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vdivsd_u128_r64, iemAImpl_vdivsd_u128_r64_fallback));
3958}
3959
3960
3961/** Opcode VEX.0F 0x5f - vmaxps Vps, Hps, Wps */
3962FNIEMOP_DEF(iemOp_vmaxps_Vps_Hps_Wps)
3963{
3964 IEMOP_MNEMONIC3(VEX_RVM, VMAXPS, vmaxps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3965 IEMOPMEDIAF3_INIT_VARS( vmaxps);
3966 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3967}
3968
3969
3970/** Opcode VEX.66.0F 0x5f - vmaxpd Vpd, Hpd, Wpd */
3971FNIEMOP_DEF(iemOp_vmaxpd_Vpd_Hpd_Wpd)
3972{
3973 IEMOP_MNEMONIC3(VEX_RVM, VMAXPD, vmaxpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3974 IEMOPMEDIAF3_INIT_VARS( vmaxpd);
3975 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
3976}
3977
3978
3979/** Opcode VEX.F3.0F 0x5f - vmaxss Vss, Hss, Wss */
3980FNIEMOP_DEF(iemOp_vmaxss_Vss_Hss_Wss)
3981{
3982 IEMOP_MNEMONIC3(VEX_RVM, VMAXSS, vmaxss, Vps, Hps, Wss, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3983 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R32,
3984 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmaxss_u128_r32, iemAImpl_vmaxss_u128_r32_fallback));
3985}
3986
3987
3988/** Opcode VEX.F2.0F 0x5f - vmaxsd Vsd, Hsd, Wsd */
3989FNIEMOP_DEF(iemOp_vmaxsd_Vsd_Hsd_Wsd)
3990{
3991 IEMOP_MNEMONIC3(VEX_RVM, VMAXSD, vmaxsd, Vpd, Hpd, Wsd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
3992 return FNIEMOP_CALL_1(iemOpCommonAvx_Vx_Hx_R64,
3993 IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vmaxsd_u128_r64, iemAImpl_vmaxsd_u128_r64_fallback));
3994}
3995
3996
3997/* Opcode VEX.0F 0x60 - invalid */
3998
3999
4000/** Opcode VEX.66.0F 0x60 - vpunpcklbw Vx, Hx, Wx */
4001FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
4002{
4003 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLBW, vpunpcklbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4004 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklbw);
4005 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4006}
4007
4008
4009/* Opcode VEX.F3.0F 0x60 - invalid */
4010
4011
4012/* Opcode VEX.0F 0x61 - invalid */
4013
4014
4015/** Opcode VEX.66.0F 0x61 - vpunpcklwd Vx, Hx, Wx */
4016FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
4017{
4018 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLWD, vpunpcklwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4019 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklwd);
4020 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4021}
4022
4023
4024/* Opcode VEX.F3.0F 0x61 - invalid */
4025
4026
4027/* Opcode VEX.0F 0x62 - invalid */
4028
4029/** Opcode VEX.66.0F 0x62 - vpunpckldq Vx, Hx, Wx */
4030FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
4031{
4032 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLDQ, vpunpckldq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4033 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckldq);
4034 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4035}
4036
4037
4038/* Opcode VEX.F3.0F 0x62 - invalid */
4039
4040
4041
4042/* Opcode VEX.0F 0x63 - invalid */
4043
4044
4045/** Opcode VEX.66.0F 0x63 - vpacksswb Vx, Hx, Wx */
4046FNIEMOP_DEF(iemOp_vpacksswb_Vx_Hx_Wx)
4047{
4048 IEMOP_MNEMONIC3(VEX_RVM, VPACKSSWB, vpacksswb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4049 IEMOPMEDIAOPTF3_INIT_VARS( vpacksswb);
4050 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4051}
4052
4053
4054/* Opcode VEX.F3.0F 0x63 - invalid */
4055
4056/* Opcode VEX.0F 0x64 - invalid */
4057
4058
4059/** Opcode VEX.66.0F 0x64 - vpcmpgtb Vx, Hx, Wx */
4060FNIEMOP_DEF(iemOp_vpcmpgtb_Vx_Hx_Wx)
4061{
4062 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTB, vpcmpgtb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4063 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpgtb);
4064 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4065}
4066
4067
4068/* Opcode VEX.F3.0F 0x64 - invalid */
4069
4070/* Opcode VEX.0F 0x65 - invalid */
4071
4072
4073/** Opcode VEX.66.0F 0x65 - vpcmpgtw Vx, Hx, Wx */
4074FNIEMOP_DEF(iemOp_vpcmpgtw_Vx_Hx_Wx)
4075{
4076 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTW, vpcmpgtw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4077 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpgtw);
4078 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4079}
4080
4081
4082/* Opcode VEX.F3.0F 0x65 - invalid */
4083
4084/* Opcode VEX.0F 0x66 - invalid */
4085
4086
4087/** Opcode VEX.66.0F 0x66 - vpcmpgtd Vx, Hx, Wx */
4088FNIEMOP_DEF(iemOp_vpcmpgtd_Vx_Hx_Wx)
4089{
4090 IEMOP_MNEMONIC3(VEX_RVM, VPCMPGTD, vpcmpgtd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4091 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpgtd);
4092 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4093}
4094
4095
4096/* Opcode VEX.F3.0F 0x66 - invalid */
4097
4098/* Opcode VEX.0F 0x67 - invalid */
4099
4100
/** Opcode VEX.66.0F 0x67 - vpackuswb Vx, Hx, Wx */
4102FNIEMOP_DEF(iemOp_vpackuswb_Vx_Hx_W)
4103{
4104 IEMOP_MNEMONIC3(VEX_RVM, VPACKUSWB, vpackuswb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4105 IEMOPMEDIAOPTF3_INIT_VARS( vpackuswb);
4106 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4107}
4108
4109
4110/* Opcode VEX.F3.0F 0x67 - invalid */
4111
4112
4113///**
4114// * Common worker for SSE2 instructions on the form:
4115// * pxxxx xmm1, xmm2/mem128
4116// *
4117// * The 2nd operand is the second half of a register, which in the memory case
4118// * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
4119// * where it may read the full 128 bits or only the upper 64 bits.
4120// *
4121// * Exceptions type 4.
4122// */
4123//FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
4124//{
4125// uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4126// if (IEM_IS_MODRM_REG_MODE(bRm))
4127// {
4128// /*
4129// * Register, register.
4130// */
4131// IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4132// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4133// IEM_MC_ARG(PRTUINT128U, pDst, 0);
4134// IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
4135// IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
4136// IEM_MC_PREPARE_SSE_USAGE();
4137// IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4138// IEM_MC_REF_XREG_U128_CONST(pSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4139// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4140// IEM_MC_ADVANCE_RIP_AND_FINISH();
4141// IEM_MC_END();
4142// }
4143// else
4144// {
4145// /*
4146// * Register, memory.
4147// */
4148// IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4149// IEM_MC_ARG(PRTUINT128U, pDst, 0);
4150// IEM_MC_LOCAL(RTUINT128U, uSrc);
4151// IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
4152// IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4153//
4154// IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4155// IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse2);
4156// IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
//        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword */
4158//
4159// IEM_MC_PREPARE_SSE_USAGE();
4160// IEM_MC_REF_XREG_U128(pDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4161// IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
4162//
4163// IEM_MC_ADVANCE_RIP_AND_FINISH();
4164// IEM_MC_END();
4165// }
4166// return VINF_SUCCESS;
4167//}
4168
4169
4170/* Opcode VEX.0F 0x68 - invalid */
4171
4172/** Opcode VEX.66.0F 0x68 - vpunpckhbw Vx, Hx, Wx */
4173FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
4174{
4175 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHBW, vpunpckhbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4176 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhbw);
4177 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4178}
4179
4180
4181/* Opcode VEX.F3.0F 0x68 - invalid */
4182
4183
4184/* Opcode VEX.0F 0x69 - invalid */
4185
4186
4187/** Opcode VEX.66.0F 0x69 - vpunpckhwd Vx, Hx, Wx */
4188FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
4189{
4190 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHWD, vpunpckhwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4191 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhwd);
4192 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4193}
4194
4195
4196/* Opcode VEX.F3.0F 0x69 - invalid */
4197
4198
4199/* Opcode VEX.0F 0x6a - invalid */
4200
4201
/** Opcode VEX.66.0F 0x6a - vpunpckhdq Vx, Hx, Wx */
4203FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
4204{
4205 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHDQ, vpunpckhdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4206 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhdq);
4207 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4208}
4209
4210
4211/* Opcode VEX.F3.0F 0x6a - invalid */
4212
4213
4214/* Opcode VEX.0F 0x6b - invalid */
4215
4216
4217/** Opcode VEX.66.0F 0x6b - vpackssdw Vx, Hx, Wx */
4218FNIEMOP_DEF(iemOp_vpackssdw_Vx_Hx_Wx)
4219{
4220 IEMOP_MNEMONIC3(VEX_RVM, VPACKSSDW, vpackssdw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4221 IEMOPMEDIAOPTF3_INIT_VARS( vpackssdw);
4222 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4223}
4224
4225
4226/* Opcode VEX.F3.0F 0x6b - invalid */
4227
4228
4229/* Opcode VEX.0F 0x6c - invalid */
4230
4231
4232/** Opcode VEX.66.0F 0x6c - vpunpcklqdq Vx, Hx, Wx */
4233FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
4234{
4235 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKLQDQ, vpunpcklqdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4236 IEMOPMEDIAOPTF3_INIT_VARS( vpunpcklqdq);
4237 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_LowSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4238}
4239
4240
4241/* Opcode VEX.F3.0F 0x6c - invalid */
4242/* Opcode VEX.F2.0F 0x6c - invalid */
4243
4244
4245/* Opcode VEX.0F 0x6d - invalid */
4246
4247
/** Opcode VEX.66.0F 0x6d - vpunpckhqdq Vx, Hx, Wx */
4249FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
4250{
4251 IEMOP_MNEMONIC3(VEX_RVM, VPUNPCKHQDQ, vpunpckhqdq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4252 IEMOPMEDIAOPTF3_INIT_VARS( vpunpckhqdq);
4253 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_HighSrc, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4254}
4255
4256
4257/* Opcode VEX.F3.0F 0x6d - invalid */
4258
4259
4260/* Opcode VEX.0F 0x6e - invalid */
4261
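/** Opcode VEX.66.0F 0x6e - vmovd Vd, Ed (VEX.W=0) / vmovq Vq, Eq (VEX.W=1, 64-bit mode). */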
4262FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey)
4263{
4264 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4265 IEMOP_HLP_IGNORE_VEX_W_PREFIX_IF_NOT_IN_64BIT();
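    /* Outside 64-bit mode the VEX.W bit is ignored, so the W=1 (vmovq) path below
       is only reachable in 64-bit mode; everywhere else this decodes as vmovd. */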
4266 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
4267 {
4268 /**
4269 * @opcode 0x6e
4270 * @opcodesub rex.w=1
4271 * @oppfx 0x66
4272 * @opcpuid avx
4273 * @opgroup og_avx_simdint_datamov
4274 * @opxcpttype 5
4275 * @optest 64-bit / op1=1 op2=2 -> op1=2
4276 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
4277 */
4278 IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Eq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
4279 if (IEM_IS_MODRM_REG_MODE(bRm))
4280 {
4281 /* XMM, greg64 */
4282 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4283 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4284 IEM_MC_LOCAL(uint64_t, u64Tmp);
4285
4286 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4287 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4288
4289 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
4290 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
4291
4292 IEM_MC_ADVANCE_RIP_AND_FINISH();
4293 IEM_MC_END();
4294 }
4295 else
4296 {
4297 /* XMM, [mem64] */
4298 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4299 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4300 IEM_MC_LOCAL(uint64_t, u64Tmp);
4301
4302 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4303 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4304 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4305 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4306
4307 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4308 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
4309
4310 IEM_MC_ADVANCE_RIP_AND_FINISH();
4311 IEM_MC_END();
4312 }
4313 }
4314 else
4315 {
4316 /**
4317 * @opdone
4318 * @opcode 0x6e
4319 * @opcodesub rex.w=0
4320 * @oppfx 0x66
4321 * @opcpuid avx
4322 * @opgroup og_avx_simdint_datamov
4323 * @opxcpttype 5
4324 * @opfunction iemOp_vmovd_q_Vy_Ey
4325 * @optest op1=1 op2=2 -> op1=2
4326 * @optest op1=0 op2=-42 -> op1=-42
4327 */
4328 IEMOP_MNEMONIC2(VEX_RM, VMOVD, vmovd, Vd_WO, Ed, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
4329 if (IEM_IS_MODRM_REG_MODE(bRm))
4330 {
4331 /* XMM, greg32 */
4332 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4333 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4334 IEM_MC_LOCAL(uint32_t, u32Tmp);
4335
4336 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4337 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4338
4339 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
4340 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
4341
4342 IEM_MC_ADVANCE_RIP_AND_FINISH();
4343 IEM_MC_END();
4344 }
4345 else
4346 {
4347 /* XMM, [mem32] */
4348 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4349 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4350 IEM_MC_LOCAL(uint32_t, u32Tmp);
4351
4352 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4353 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
4354 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4355 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4356
4357 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4358 IEM_MC_STORE_YREG_U32_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
4359
4360 IEM_MC_ADVANCE_RIP_AND_FINISH();
4361 IEM_MC_END();
4362 }
4363 }
4364}
4365
4366
4367/* Opcode VEX.F3.0F 0x6e - invalid */
4368
4369
4370/* Opcode VEX.0F 0x6f - invalid */
4371
4372/**
4373 * @opcode 0x6f
4374 * @oppfx 0x66
4375 * @opcpuid avx
4376 * @opgroup og_avx_simdint_datamove
4377 * @opxcpttype 1
4378 * @optest op1=1 op2=2 -> op1=2
4379 * @optest op1=0 op2=-42 -> op1=-42
4380 */
4381FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx)
4382{
4383 IEMOP_MNEMONIC2(VEX_RM, VMOVDQA, vmovdqa, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
4384 Assert(pVCpu->iem.s.uVexLength <= 1);
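    /* Note: the memory forms use the alignment-checking fetchers
       (IEM_MC_FETCH_MEM_U128_ALIGN_SSE / IEM_MC_FETCH_MEM_U256_ALIGN_AVX), so a
       misaligned operand faults, unlike with vmovdqu. */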
4385 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4386 if (IEM_IS_MODRM_REG_MODE(bRm))
4387 {
4388 /*
4389 * Register, register.
4390 */
4391 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4392 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4393
4394 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4395 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4396 if (pVCpu->iem.s.uVexLength == 0)
4397 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
4398 IEM_GET_MODRM_RM(pVCpu, bRm));
4399 else
4400 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
4401 IEM_GET_MODRM_RM(pVCpu, bRm));
4402 IEM_MC_ADVANCE_RIP_AND_FINISH();
4403 IEM_MC_END();
4404 }
4405 else if (pVCpu->iem.s.uVexLength == 0)
4406 {
4407 /*
4408 * Register, memory128.
4409 */
4410 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4411 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4412 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4413
4414 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4415 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4416 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4417 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4418
4419 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4420 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
4421
4422 IEM_MC_ADVANCE_RIP_AND_FINISH();
4423 IEM_MC_END();
4424 }
4425 else
4426 {
4427 /*
4428 * Register, memory256.
4429 */
4430 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4431 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
4432 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4433
4434 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4435 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4436 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4437 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4438
4439 IEM_MC_FETCH_MEM_U256_ALIGN_AVX(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4440 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
4441
4442 IEM_MC_ADVANCE_RIP_AND_FINISH();
4443 IEM_MC_END();
4444 }
4445}
4446
4447/**
4448 * @opcode 0x6f
4449 * @oppfx 0xf3
4450 * @opcpuid avx
4451 * @opgroup og_avx_simdint_datamove
4452 * @opxcpttype 4UA
4453 * @optest op1=1 op2=2 -> op1=2
4454 * @optest op1=0 op2=-42 -> op1=-42
4455 */
4456FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx)
4457{
4458 IEMOP_MNEMONIC2(VEX_RM, VMOVDQU, vmovdqu, Vx_WO, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
4459 Assert(pVCpu->iem.s.uVexLength <= 1);
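    /* No alignment restrictions here, hence the _NO_AC fetch variants below. */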
4460 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4461 if (IEM_IS_MODRM_REG_MODE(bRm))
4462 {
4463 /*
4464 * Register, register.
4465 */
4466 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4467 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4468
4469 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4470 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4471 if (pVCpu->iem.s.uVexLength == 0)
4472 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
4473 IEM_GET_MODRM_RM(pVCpu, bRm));
4474 else
4475 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
4476 IEM_GET_MODRM_RM(pVCpu, bRm));
4477 IEM_MC_ADVANCE_RIP_AND_FINISH();
4478 IEM_MC_END();
4479 }
4480 else if (pVCpu->iem.s.uVexLength == 0)
4481 {
4482 /*
4483 * Register, memory128.
4484 */
4485 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4486 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
4487 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4488
4489 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4490 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4491 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4492 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4493
4494 IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4495 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
4496
4497 IEM_MC_ADVANCE_RIP_AND_FINISH();
4498 IEM_MC_END();
4499 }
4500 else
4501 {
4502 /*
4503 * Register, memory256.
4504 */
4505 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4506 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
4507 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4508
4509 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4510 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4511 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4512 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
4513
4514 IEM_MC_FETCH_MEM_U256_NO_AC(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4515 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
4516
4517 IEM_MC_ADVANCE_RIP_AND_FINISH();
4518 IEM_MC_END();
4519 }
4520}
4521
4522
4523/* Opcode VEX.0F 0x70 - invalid */
4524
4525
4526/**
4527 * Common worker for AVX/AVX2 instructions on the forms:
4528 * - vpxxx xmm0, xmm2/mem128, imm8
4529 * - vpxxx ymm0, ymm2/mem256, imm8
4530 *
4531 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
4532 */
4533FNIEMOP_DEF_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, PFNIEMAIMPLMEDIAPSHUFU128, pfnU128, PFNIEMAIMPLMEDIAPSHUFU256, pfnU256)
4534{
4535 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4536 if (IEM_IS_MODRM_REG_MODE(bRm))
4537 {
4538 /*
4539 * Register, register.
4540 */
4541 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4542 if (pVCpu->iem.s.uVexLength)
4543 {
4544 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4545 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
4546 IEM_MC_LOCAL(RTUINT256U, uDst);
4547 IEM_MC_LOCAL(RTUINT256U, uSrc);
4548 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
4549 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
4550 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
4551 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4552 IEM_MC_PREPARE_AVX_USAGE();
4553 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4554 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
4555 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
4556 IEM_MC_ADVANCE_RIP_AND_FINISH();
4557 IEM_MC_END();
4558 }
4559 else
4560 {
4561 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4562 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4563 IEM_MC_ARG(PRTUINT128U, puDst, 0);
4564 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4565 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
4566 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4567 IEM_MC_PREPARE_AVX_USAGE();
4568 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4569 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4570 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
4571 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
4572 IEM_MC_ADVANCE_RIP_AND_FINISH();
4573 IEM_MC_END();
4574 }
4575 }
4576 else
4577 {
4578 /*
4579 * Register, memory.
4580 */
4581 if (pVCpu->iem.s.uVexLength)
4582 {
4583 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4584 IEM_MC_LOCAL(RTUINT256U, uDst);
4585 IEM_MC_LOCAL(RTUINT256U, uSrc);
4586 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4587 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
4588 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
4589
4590 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
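            /* The trailing '1' tells the effective-address calculation that one
               immediate byte still follows the ModR/M encoding (this matters for
               RIP-relative addressing). */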
4591 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4592 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
4593 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
4594 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4595 IEM_MC_PREPARE_AVX_USAGE();
4596
4597 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4598 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
4599 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
4600
4601 IEM_MC_ADVANCE_RIP_AND_FINISH();
4602 IEM_MC_END();
4603 }
4604 else
4605 {
4606 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4607 IEM_MC_LOCAL(RTUINT128U, uSrc);
4608 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4609 IEM_MC_ARG(PRTUINT128U, puDst, 0);
4610 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
4611
4612 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
4613 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4614 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
4615 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
4616 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4617 IEM_MC_PREPARE_AVX_USAGE();
4618
4619 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
4620 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
4621 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
4622 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
4623
4624 IEM_MC_ADVANCE_RIP_AND_FINISH();
4625 IEM_MC_END();
4626 }
4627 }
4628}
4629
4630
4631/** Opcode VEX.66.0F 0x70 - vpshufd Vx, Wx, Ib */
4632FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
4633{
4634 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFD, vpshufd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4635 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufd_u128,
4636 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufd_u256, iemAImpl_vpshufd_u256_fallback));
}
4639
4640
4641/** Opcode VEX.F3.0F 0x70 - vpshufhw Vx, Wx, Ib */
4642FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
4643{
4644 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFHW, vpshufhw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4645 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshufhw_u128,
4646 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshufhw_u256, iemAImpl_vpshufhw_u256_fallback));
}
4649
4650
4651/** Opcode VEX.F2.0F 0x70 - vpshuflw Vx, Wx, Ib */
4652FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
4653{
4654 IEMOP_MNEMONIC3(VEX_RMI, VPSHUFLW, vpshuflw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4655 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_vpshufXX_Vx_Wx_Ib, iemAImpl_pshuflw_u128,
4656 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpshuflw_u256, iemAImpl_vpshuflw_u256_fallback));
4657}
4658
4659
4660/**
4661 * Common worker(s) for AVX/AVX2 instructions on the forms:
4662 * - vpxxx xmm0, xmm2, imm8
4663 * - vpxxx ymm0, ymm2, imm8
4664 *
4665 * Exceptions type 4. AVX cpuid check for 128-bit operation, AVX2 for 256-bit.
4666 */
4667FNIEMOP_DEF_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHUFU128, pfnU128)
4668{
4669 if (IEM_IS_MODRM_REG_MODE(bRm))
4670 {
4671 /*
4672 * Register, register.
4673 */
4674 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4675 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4676 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
4677 IEM_MC_ARG(PRTUINT128U, puDst, 0);
4678 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
4679 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
4680 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4681 IEM_MC_PREPARE_AVX_USAGE();
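        /* Note: for these shift-by-immediate forms the destination is the register
           encoded in VEX.vvvv while ModR/M.rm supplies the source, e.g.
           'vpsrlw xmm1, xmm2, 3' writes xmm1. */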
4682 IEM_MC_REF_XREG_U128(puDst, IEM_GET_EFFECTIVE_VVVV(pVCpu));
4683 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4684 IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
4685 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_EFFECTIVE_VVVV(pVCpu));
4686 IEM_MC_ADVANCE_RIP_AND_FINISH();
4687 IEM_MC_END();
4688 }
4689 /* No memory operand. */
4690 else
4691 IEMOP_RAISE_INVALID_OPCODE_RET();
4692}
4693
4694FNIEMOP_DEF_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, uint8_t, bRm, PFNIEMAIMPLMEDIAPSHUFU256, pfnU256)
4695{
4696 if (IEM_IS_MODRM_REG_MODE(bRm))
4697 {
4698 /*
4699 * Register, register.
4700 */
4701 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
4702 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
4703 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2);
4704 IEM_MC_LOCAL(RTUINT256U, uDst);
4705 IEM_MC_LOCAL(RTUINT256U, uSrc);
4706 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0);
4707 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
4708 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
4709 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
4710 IEM_MC_PREPARE_AVX_USAGE();
4711 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
4712 IEM_MC_CALL_VOID_AIMPL_3(pfnU256, puDst, puSrc, bImmArg);
4713 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_EFFECTIVE_VVVV(pVCpu), uDst);
4714 IEM_MC_ADVANCE_RIP_AND_FINISH();
4715 IEM_MC_END();
4716 }
4717 /* No memory operand. */
4718 else
4719 IEMOP_RAISE_INVALID_OPCODE_RET();
4720}
4721
4722
4723/* Opcode VEX.0F 0x71 11/2 - invalid. */
4724/** Opcode VEX.66.0F 0x71 11/2. */
4725FNIEMOP_DEF_1(iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, uint8_t, bRm)
4726{
4727 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLW, vpsrlw, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4728 if (pVCpu->iem.s.uVexLength)
4729 {
4730 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4731 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlw_imm_u256, iemAImpl_vpsrlw_imm_u256_fallback));
4732 }
4733 else
4734 {
4735 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4736 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlw_imm_u128, iemAImpl_vpsrlw_imm_u128_fallback));
4737 }
4738}
4739
4740
4741/* Opcode VEX.0F 0x71 11/4 - invalid */
4742/** Opcode VEX.66.0F 0x71 11/4. */
4743FNIEMOP_DEF_1(iemOp_VGrp12_vpsraw_Hx_Ux_Ib, uint8_t, bRm)
4744{
4745 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRAW, vpsraw, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4746 if (pVCpu->iem.s.uVexLength)
4747 {
4748 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4749 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsraw_imm_u256, iemAImpl_vpsraw_imm_u256_fallback));
4750 }
4751 else
4752 {
4753 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4754 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsraw_imm_u128, iemAImpl_vpsraw_imm_u128_fallback));
4755 }
4756}
4757
4758/* Opcode VEX.0F 0x71 11/6 - invalid */
4759
4760/** Opcode VEX.66.0F 0x71 11/6. */
4761FNIEMOP_DEF_1(iemOp_VGrp12_vpsllw_Hx_Ux_Ib, uint8_t, bRm)
4762{
4763 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLW, vpsllw, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4764 if (pVCpu->iem.s.uVexLength)
4765 {
4766 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4767 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllw_imm_u256, iemAImpl_vpsllw_imm_u256_fallback));
4768 }
4769 else
4770 {
4771 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4772 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllw_imm_u128, iemAImpl_vpsllw_imm_u128_fallback));
4773 }
4774}
4775
4776
4777/**
4778 * VEX Group 12 jump table for register variant.
4779 */
4780IEM_STATIC const PFNIEMOPRM g_apfnVexGroup12RegReg[] =
4781{
4782 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4783 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4784 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4785 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4786 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsraw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4787 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4788 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsllw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4789 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4790};
4791AssertCompile(RT_ELEMENTS(g_apfnVexGroup12RegReg) == 8*4);
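/* The group jump tables are indexed by ModR/M.reg * 4 + the mandatory-prefix
   index in pVCpu->iem.s.idxPrefix (0 = none, 1 = 0x66, 2 = 0xF3, 3 = 0xF2). */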
4792
4793
4794/** Opcode VEX.0F 0x71. */
4795FNIEMOP_DEF(iemOp_VGrp12)
4796{
4797 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4798 if (IEM_IS_MODRM_REG_MODE(bRm))
4799 /* register, register */
4800 return FNIEMOP_CALL_1(g_apfnVexGroup12RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
4801 + pVCpu->iem.s.idxPrefix], bRm);
4802 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4803}
4804
4805
4806/* Opcode VEX.0F 0x72 11/2 - invalid. */
4807/** Opcode VEX.66.0F 0x72 11/2. */
4808FNIEMOP_DEF_1(iemOp_VGrp13_vpsrld_Hx_Ux_Ib, uint8_t, bRm)
4809{
4810 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLD, vpsrld, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4811 if (pVCpu->iem.s.uVexLength)
4812 {
4813 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4814 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrld_imm_u256, iemAImpl_vpsrld_imm_u256_fallback));
4815 }
4816 else
4817 {
4818 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4819 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrld_imm_u128, iemAImpl_vpsrld_imm_u128_fallback));
4820 }
4821}
4822
4823
4824/* Opcode VEX.0F 0x72 11/4 - invalid. */
4825/** Opcode VEX.66.0F 0x72 11/4. */
4826FNIEMOP_DEF_1(iemOp_VGrp13_vpsrad_Hx_Ux_Ib, uint8_t, bRm)
4827{
4828 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRAD, vpsrad, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4829 if (pVCpu->iem.s.uVexLength)
4830 {
4831 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4832 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrad_imm_u256, iemAImpl_vpsrad_imm_u256_fallback));
4833 }
4834 else
4835 {
4836 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4837 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrad_imm_u128, iemAImpl_vpsrad_imm_u128_fallback));
4838 }
4839}
4840
4841/* Opcode VEX.0F 0x72 11/6 - invalid. */
4842
4843/** Opcode VEX.66.0F 0x72 11/6. */
4844FNIEMOP_DEF_1(iemOp_VGrp13_vpslld_Hx_Ux_Ib, uint8_t, bRm)
4845{
4846 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLD, vpslld, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4847 if (pVCpu->iem.s.uVexLength)
4848 {
4849 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4850 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslld_imm_u256, iemAImpl_vpslld_imm_u256_fallback));
4851 }
4852 else
4853 {
4854 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4855 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslld_imm_u128, iemAImpl_vpslld_imm_u128_fallback));
4856 }
4857}
4858
4859
4860/**
4861 * Group 13 jump table for register variant.
4862 */
4863IEM_STATIC const PFNIEMOPRM g_apfnVexGroup13RegReg[] =
4864{
4865 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4866 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4867 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4868 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4869 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrad_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4870 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4871 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpslld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4872 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
4873};
4874AssertCompile(RT_ELEMENTS(g_apfnVexGroup13RegReg) == 8*4);
4875
4876/** Opcode VEX.0F 0x72. */
4877FNIEMOP_DEF(iemOp_VGrp13)
4878{
4879 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4880 if (IEM_IS_MODRM_REG_MODE(bRm))
4881 /* register, register */
4882 return FNIEMOP_CALL_1(g_apfnVexGroup13RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
4883 + pVCpu->iem.s.idxPrefix], bRm);
4884 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4885}
4886
4887
4888/* Opcode VEX.0F 0x73 11/2 - invalid. */
4889/** Opcode VEX.66.0F 0x73 11/2. */
4890FNIEMOP_DEF_1(iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, uint8_t, bRm)
4891{
4892 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLQ, vpsrlq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4893 if (pVCpu->iem.s.uVexLength)
4894 {
4895 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4896 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlq_imm_u256, iemAImpl_vpsrlq_imm_u256_fallback));
4897 }
4898 else
4899 {
4900 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4901 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrlq_imm_u128, iemAImpl_vpsrlq_imm_u128_fallback));
4902 }
4903}
4904
4905
4906/** Opcode VEX.66.0F 0x73 11/3. */
4907FNIEMOP_DEF_1(iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, uint8_t, bRm)
4908{
4909 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSRLDQ, vpsrldq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4910 if (pVCpu->iem.s.uVexLength)
4911 {
4912 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4913 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrldq_imm_u256, iemAImpl_vpsrldq_imm_u256_fallback));
4914 }
4915 else
4916 {
4917 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4918 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsrldq_imm_u128, iemAImpl_vpsrldq_imm_u128_fallback));
4919 }
4920}
4921
4922/* Opcode VEX.0F 0x73 11/6 - invalid. */
4923
4924/** Opcode VEX.66.0F 0x73 11/6. */
4925FNIEMOP_DEF_1(iemOp_VGrp14_vpsllq_Hx_Ux_Ib, uint8_t, bRm)
4926{
4927 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLQ, vpsllq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4928 if (pVCpu->iem.s.uVexLength)
4929 {
4930 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4931 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllq_imm_u256, iemAImpl_vpsllq_imm_u256_fallback));
4932 }
4933 else
4934 {
4935 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4936 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpsllq_imm_u128, iemAImpl_vpsllq_imm_u128_fallback));
4937 }
4938}
4939
4940/** Opcode VEX.66.0F 0x73 11/7. */
4941FNIEMOP_DEF_1(iemOp_VGrp14_vpslldq_Hx_Ux_Ib, uint8_t, bRm)
4942{
4943 IEMOP_MNEMONIC3(VEX_VMI_REG, VPSLLDQ, vpslldq, Hx, Ux, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
4944 if (pVCpu->iem.s.uVexLength)
4945 {
4946 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u256, bRm,
4947 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslldq_imm_u256, iemAImpl_vpslldq_imm_u256_fallback));
4948 }
4949 else
4950 {
4951 return FNIEMOP_CALL_2(iemOpCommonAvxAvx2_Hx_Ux_Ib_u128, bRm,
4952 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpslldq_imm_u128, iemAImpl_vpslldq_imm_u128_fallback));
4953 }
4954}
4955
4956/* Opcode VEX.0F 0x73 11/7 - invalid. */
4957
4958/**
4959 * Group 14 jump table for the register variant, indexed by ModR/M.reg * 4 + the SIMD prefix index.
4960 */
4961IEM_STATIC const PFNIEMOPRM g_apfnVexGroup14RegReg[] =
4962{ /* pfx: none, 066h, 0f3h, 0f2h */
4963 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4964 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4965 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4966 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4967 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4968 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
4969 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsllq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4970 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpslldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
4971};
4972AssertCompile(RT_ELEMENTS(g_apfnVexGroup14RegReg) == 8*4);
4973
4974
4975/** Opcode VEX.0F 0x73. */
4976FNIEMOP_DEF(iemOp_VGrp14)
4977{
4978 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4979 if (IEM_IS_MODRM_REG_MODE(bRm))
4980 /* register, register */
4981 return FNIEMOP_CALL_1(g_apfnVexGroup14RegReg[ IEM_GET_MODRM_REG_8(bRm) * 4
4982 + pVCpu->iem.s.idxPrefix], bRm);
4983 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm);
4984}
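/* Dispatch illustration (not part of the original source): C5 F9 73 D9 02
   (vpsrldq xmm0, xmm1, 2) has idxPrefix=1 (66h) and ModR/M reg=3, giving
   table index 3*4 + 1 = 13, the iemOp_VGrp14_vpsrldq_Hx_Ux_Ib entry. */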
4985
4986
4987/* Opcode VEX.0F 0x74 - invalid */
4988
4989
4990/** Opcode VEX.66.0F 0x74 - vpcmpeqb Vx, Hx, Wx */
4991FNIEMOP_DEF(iemOp_vpcmpeqb_Vx_Hx_Wx)
4992{
4993 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQB, vpcmpeqb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
4994 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpeqb);
4995 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
4996}
4997
4998/* Opcode VEX.F3.0F 0x74 - invalid */
4999/* Opcode VEX.F2.0F 0x74 - invalid */
5000
5001
5002/* Opcode VEX.0F 0x75 - invalid */
5003
5004
5005/** Opcode VEX.66.0F 0x75 - vpcmpeqw Vx, Hx, Wx */
5006FNIEMOP_DEF(iemOp_vpcmpeqw_Vx_Hx_Wx)
5007{
5008 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQW, vpcmpeqw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5009 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpeqw);
5010 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5011}
5012
5013
5014/* Opcode VEX.F3.0F 0x75 - invalid */
5015/* Opcode VEX.F2.0F 0x75 - invalid */
5016
5017
5018/* Opcode VEX.0F 0x76 - invalid */
5019
5020
5021/** Opcode VEX.66.0F 0x76 - vpcmpeqd Vx, Hx, Wx */
5022FNIEMOP_DEF(iemOp_vpcmpeqd_Vx_Hx_Wx)
5023{
5024 IEMOP_MNEMONIC3(VEX_RVM, VPCMPEQD, vpcmpeqd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
5025 IEMOPMEDIAOPTF3_INIT_VARS( vpcmpeqd);
5026 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
5027}
5028
5029
5030/* Opcode VEX.F3.0F 0x76 - invalid */
5031/* Opcode VEX.F2.0F 0x76 - invalid */
5032
5033
5034/** Opcode VEX.0F 0x77 - vzeroupper / vzeroall */
5035FNIEMOP_DEF(iemOp_vzeroupperv__vzeroallv)
5036{
5037 Assert(pVCpu->iem.s.uVexLength <= 1);
5038 if (pVCpu->iem.s.uVexLength == 0)
5039 {
5040 /*
5041 * 128-bit: vzeroupper
5042 */
5043 IEMOP_MNEMONIC(vzeroupper, "vzeroupper");
5044 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5045
5046 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5047 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5048 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
5049
5050 IEM_MC_CLEAR_YREG_128_UP(0);
5051 IEM_MC_CLEAR_YREG_128_UP(1);
5052 IEM_MC_CLEAR_YREG_128_UP(2);
5053 IEM_MC_CLEAR_YREG_128_UP(3);
5054 IEM_MC_CLEAR_YREG_128_UP(4);
5055 IEM_MC_CLEAR_YREG_128_UP(5);
5056 IEM_MC_CLEAR_YREG_128_UP(6);
5057 IEM_MC_CLEAR_YREG_128_UP(7);
5058
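        /* XMM/YMM8 thru XMM/YMM15 are only accessible in 64-bit mode. */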
5059        if (IEM_IS_64BIT_CODE(pVCpu))
5060 {
5061 IEM_MC_CLEAR_YREG_128_UP( 8);
5062 IEM_MC_CLEAR_YREG_128_UP( 9);
5063 IEM_MC_CLEAR_YREG_128_UP(10);
5064 IEM_MC_CLEAR_YREG_128_UP(11);
5065 IEM_MC_CLEAR_YREG_128_UP(12);
5066 IEM_MC_CLEAR_YREG_128_UP(13);
5067 IEM_MC_CLEAR_YREG_128_UP(14);
5068 IEM_MC_CLEAR_YREG_128_UP(15);
5069 }
5070
5071 IEM_MC_ADVANCE_RIP_AND_FINISH();
5072 IEM_MC_END();
5073 }
5074 else
5075 {
5076 /*
5077 * 256-bit: vzeroall
5078 */
5079 IEMOP_MNEMONIC(vzeroall, "vzeroall");
5080 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5081
5082 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5083 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5084 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
5085
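        /* A dword store with ZX_VLMAX semantics zero-extends to the full vector width, zeroing each register entirely. */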
5086 IEM_MC_LOCAL_CONST(uint32_t, uZero, 0);
5087 IEM_MC_STORE_YREG_U32_ZX_VLMAX(0, uZero);
5088 IEM_MC_STORE_YREG_U32_ZX_VLMAX(1, uZero);
5089 IEM_MC_STORE_YREG_U32_ZX_VLMAX(2, uZero);
5090 IEM_MC_STORE_YREG_U32_ZX_VLMAX(3, uZero);
5091 IEM_MC_STORE_YREG_U32_ZX_VLMAX(4, uZero);
5092 IEM_MC_STORE_YREG_U32_ZX_VLMAX(5, uZero);
5093 IEM_MC_STORE_YREG_U32_ZX_VLMAX(6, uZero);
5094 IEM_MC_STORE_YREG_U32_ZX_VLMAX(7, uZero);
5095
5096        if (IEM_IS_64BIT_CODE(pVCpu))
5097 {
5098 IEM_MC_STORE_YREG_U32_ZX_VLMAX( 8, uZero);
5099 IEM_MC_STORE_YREG_U32_ZX_VLMAX( 9, uZero);
5100 IEM_MC_STORE_YREG_U32_ZX_VLMAX(10, uZero);
5101 IEM_MC_STORE_YREG_U32_ZX_VLMAX(11, uZero);
5102 IEM_MC_STORE_YREG_U32_ZX_VLMAX(12, uZero);
5103 IEM_MC_STORE_YREG_U32_ZX_VLMAX(13, uZero);
5104 IEM_MC_STORE_YREG_U32_ZX_VLMAX(14, uZero);
5105 IEM_MC_STORE_YREG_U32_ZX_VLMAX(15, uZero);
5106 }
5107
5108 IEM_MC_ADVANCE_RIP_AND_FINISH();
5109 IEM_MC_END();
5110 }
5111}
5112
5113
5114/* Opcode VEX.66.0F 0x77 - invalid */
5115/* Opcode VEX.F3.0F 0x77 - invalid */
5116/* Opcode VEX.F2.0F 0x77 - invalid */
5117
5118/* Opcode VEX.0F 0x78 - invalid */
5119/* Opcode VEX.66.0F 0x78 - invalid */
5120/* Opcode VEX.F3.0F 0x78 - invalid */
5121/* Opcode VEX.F2.0F 0x78 - invalid */
5122
5123/* Opcode VEX.0F 0x79 - invalid */
5124/* Opcode VEX.66.0F 0x79 - invalid */
5125/* Opcode VEX.F3.0F 0x79 - invalid */
5126/* Opcode VEX.F2.0F 0x79 - invalid */
5127
5128/* Opcode VEX.0F 0x7a - invalid */
5129/* Opcode VEX.66.0F 0x7a - invalid */
5130/* Opcode VEX.F3.0F 0x7a - invalid */
5131/* Opcode VEX.F2.0F 0x7a - invalid */
5132
5133/* Opcode VEX.0F 0x7b - invalid */
5134/* Opcode VEX.66.0F 0x7b - invalid */
5135/* Opcode VEX.F3.0F 0x7b - invalid */
5136/* Opcode VEX.F2.0F 0x7b - invalid */
5137
5138/* Opcode VEX.0F 0x7c - invalid */
5139
5140
5141/** Opcode VEX.66.0F 0x7c - vhaddpd Vpd, Hpd, Wpd */
5142FNIEMOP_DEF(iemOp_vhaddpd_Vpd_Hpd_Wpd)
5143{
5144    IEMOP_MNEMONIC3(VEX_RVM, VHADDPD, vhaddpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5145 IEMOPMEDIAF3_INIT_VARS( vhaddpd);
5146 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
5147}
5148
5149
5150/* Opcode VEX.F3.0F 0x7c - invalid */
5151
5152
5153/** Opcode VEX.F2.0F 0x7c - vhaddps Vps, Hps, Wps */
5154FNIEMOP_DEF(iemOp_vhaddps_Vps_Hps_Wps)
5155{
5156 IEMOP_MNEMONIC3(VEX_RVM, VHADDPS, vhaddps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5157 IEMOPMEDIAF3_INIT_VARS( vhaddps);
5158 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
5159}
5160
5161
5162/* Opcode VEX.0F 0x7d - invalid */
5163
5164
5165/** Opcode VEX.66.0F 0x7d - vhsubpd Vpd, Hpd, Wpd */
5166FNIEMOP_DEF(iemOp_vhsubpd_Vpd_Hpd_Wpd)
5167{
5168    IEMOP_MNEMONIC3(VEX_RVM, VHSUBPD, vhsubpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5169 IEMOPMEDIAF3_INIT_VARS( vhsubpd);
5170 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
5171}
5172
5173
5174/* Opcode VEX.F3.0F 0x7d - invalid */
5175
5176
5177/** Opcode VEX.F2.0F 0x7d - vhsubps Vps, Hps, Wps */
5178FNIEMOP_DEF(iemOp_vhsubps_Vps_Hps_Wps)
5179{
5180 IEMOP_MNEMONIC3(VEX_RVM, VHSUBPS, vhsubps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5181 IEMOPMEDIAF3_INIT_VARS( vhsubps);
5182 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
5183}
5184
5185
5186/* Opcode VEX.0F 0x7e - invalid */
5187
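/** Opcode VEX.66.0F 0x7e - vmovd/vmovq Ey, Vy; VEX.W selects the vmovq form, but is ignored outside 64-bit mode. */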
5188FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy)
5189{
5190 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5191 IEMOP_HLP_IGNORE_VEX_W_PREFIX_IF_NOT_IN_64BIT();
5192 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
5193 {
5194 /**
5195 * @opcode 0x7e
5196 * @opcodesub rex.w=1
5197 * @oppfx 0x66
5198 * @opcpuid avx
5199 * @opgroup    og_avx_simdint_datamove
5200 * @opxcpttype 5
5201 * @optest 64-bit / op1=1 op2=2 -> op1=2
5202 * @optest 64-bit / op1=0 op2=-42 -> op1=-42
5203 */
5204 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Eq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
5205 if (IEM_IS_MODRM_REG_MODE(bRm))
5206 {
5207 /* greg64, XMM */
5208 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
5209 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5210 IEM_MC_LOCAL(uint64_t, u64Tmp);
5211
5212 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5213 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5214
5215 IEM_MC_FETCH_YREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
5216 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp);
5217
5218 IEM_MC_ADVANCE_RIP_AND_FINISH();
5219 IEM_MC_END();
5220 }
5221 else
5222 {
5223 /* [mem64], XMM */
5224 IEM_MC_BEGIN(IEM_MC_F_64BIT, 0);
5225 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5226 IEM_MC_LOCAL(uint64_t, u64Tmp);
5227
5228 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5229 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5230 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5231 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5232
5233 IEM_MC_FETCH_YREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
5234 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp);
5235
5236 IEM_MC_ADVANCE_RIP_AND_FINISH();
5237 IEM_MC_END();
5238 }
5239 }
5240 else
5241 {
5242 /**
5243 * @opdone
5244 * @opcode 0x7e
5245 * @opcodesub rex.w=0
5246 * @oppfx 0x66
5247 * @opcpuid avx
5248 * @opgroup    og_avx_simdint_datamove
5249 * @opxcpttype 5
5250 * @opfunction iemOp_vmovd_q_Ey_Vy
5251 * @optest op1=1 op2=2 -> op1=2
5252 * @optest op1=0 op2=-42 -> op1=-42
5253 */
5254 IEMOP_MNEMONIC2(VEX_MR, VMOVD, vmovd, Ed_WO, Vd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_VEX_L_ZERO);
5255 if (IEM_IS_MODRM_REG_MODE(bRm))
5256 {
5257 /* greg32, XMM */
5258 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5259 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5260 IEM_MC_LOCAL(uint32_t, u32Tmp);
5261
5262 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5263 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5264
5265 IEM_MC_FETCH_YREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5266 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp);
5267
5268 IEM_MC_ADVANCE_RIP_AND_FINISH();
5269 IEM_MC_END();
5270 }
5271 else
5272 {
5273 /* [mem32], XMM */
5274 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5275 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5276 IEM_MC_LOCAL(uint32_t, u32Tmp);
5277
5278 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5279 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5280 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5281 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5282
5283 IEM_MC_FETCH_YREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5284 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp);
5285
5286 IEM_MC_ADVANCE_RIP_AND_FINISH();
5287 IEM_MC_END();
5288 }
5289 }
5290}
5291
5292
5293/**
5294 * @opcode 0x7e
5295 * @oppfx 0xf3
5296 * @opcpuid avx
5297 * @opgroup og_avx_pcksclr_datamove
5298 * @opxcpttype none
5299 * @optest op1=1 op2=2 -> op1=2
5300 * @optest op1=0 op2=-42 -> op1=-42
5301 */
5302FNIEMOP_DEF(iemOp_vmovq_Vq_Wq)
5303{
5304 IEMOP_MNEMONIC2(VEX_RM, VMOVQ, vmovq, Vq_WO, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
5305 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5306 if (IEM_IS_MODRM_REG_MODE(bRm))
5307 {
5308 /*
5309 * Register, register.
5310 */
5311 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5312 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5313
5314 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5315 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
5316
5317 IEM_MC_COPY_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm),
5318 IEM_GET_MODRM_RM(pVCpu, bRm));
5319 IEM_MC_ADVANCE_RIP_AND_FINISH();
5320 IEM_MC_END();
5321 }
5322 else
5323 {
5324 /*
5325 * Memory, register.
5326 */
5327 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5328 IEM_MC_LOCAL(uint64_t, uSrc);
5329 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5330
5331 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5332 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5333 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5334 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
5335
5336 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5337 IEM_MC_STORE_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
5338
5339 IEM_MC_ADVANCE_RIP_AND_FINISH();
5340 IEM_MC_END();
5341 }
5342}
5343
5344/* Opcode VEX.F2.0F 0x7e - invalid */
5345
5346
5347/* Opcode VEX.0F 0x7f - invalid */
5348
5349/**
5350 * @opcode 0x7f
5351 * @oppfx 0x66
5352 * @opcpuid avx
5353 * @opgroup og_avx_simdint_datamove
5354 * @opxcpttype 1
5355 * @optest op1=1 op2=2 -> op1=2
5356 * @optest op1=0 op2=-42 -> op1=-42
5357 */
5358FNIEMOP_DEF(iemOp_vmovdqa_Wx_Vx)
5359{
5360 IEMOP_MNEMONIC2(VEX_MR, VMOVDQA, vmovdqa, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
5361 Assert(pVCpu->iem.s.uVexLength <= 1);
5362 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5363 if (IEM_IS_MODRM_REG_MODE(bRm))
5364 {
5365 /*
5366 * Register, register.
5367 */
5368 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5369 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5370
5371 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5372 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
5373 if (pVCpu->iem.s.uVexLength == 0)
5374 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
5375 IEM_GET_MODRM_REG(pVCpu, bRm));
5376 else
5377 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
5378 IEM_GET_MODRM_REG(pVCpu, bRm));
5379 IEM_MC_ADVANCE_RIP_AND_FINISH();
5380 IEM_MC_END();
5381 }
5382 else if (pVCpu->iem.s.uVexLength == 0)
5383 {
5384 /*
5385 * Register, memory128.
5386 */
5387 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5388 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
5389 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5390
5391 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5392 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5393 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5394 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5395
5396 IEM_MC_FETCH_YREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
5397 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
5398
5399 IEM_MC_ADVANCE_RIP_AND_FINISH();
5400 IEM_MC_END();
5401 }
5402 else
5403 {
5404 /*
5405 * Register, memory256.
5406 */
5407 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5408 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
5409 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5410
5411 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5412 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5413 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5414 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5415
5416 IEM_MC_FETCH_YREG_U256(u256Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5417 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
5418
5419 IEM_MC_ADVANCE_RIP_AND_FINISH();
5420 IEM_MC_END();
5421 }
5422}
5423
5424
5425/**
5426 * @opcode 0x7f
5427 * @oppfx 0xf3
5428 * @opcpuid avx
5429 * @opgroup og_avx_simdint_datamove
5430 * @opxcpttype 4UA
5431 * @optest op1=1 op2=2 -> op1=2
5432 * @optest op1=0 op2=-42 -> op1=-42
5433 */
5434FNIEMOP_DEF(iemOp_vmovdqu_Wx_Vx)
5435{
5436 IEMOP_MNEMONIC2(VEX_MR, VMOVDQU, vmovdqu, Wx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
5437 Assert(pVCpu->iem.s.uVexLength <= 1);
5438 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5439 if (IEM_IS_MODRM_REG_MODE(bRm))
5440 {
5441 /*
5442 * Register, register.
5443 */
5444 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5445 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5446
5447 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5448 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
5449 if (pVCpu->iem.s.uVexLength == 0)
5450 IEM_MC_COPY_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
5451 IEM_GET_MODRM_REG(pVCpu, bRm));
5452 else
5453 IEM_MC_COPY_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
5454 IEM_GET_MODRM_REG(pVCpu, bRm));
5455 IEM_MC_ADVANCE_RIP_AND_FINISH();
5456 IEM_MC_END();
5457 }
5458 else if (pVCpu->iem.s.uVexLength == 0)
5459 {
5460 /*
5461 * Register, memory128.
5462 */
5463 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5464 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
5465 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5466
5467 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5468 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5469 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5470 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5471
5472 IEM_MC_FETCH_YREG_U128(u128Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
5473 IEM_MC_STORE_MEM_U128_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp);
5474
5475 IEM_MC_ADVANCE_RIP_AND_FINISH();
5476 IEM_MC_END();
5477 }
5478 else
5479 {
5480 /*
5481 * Register, memory256.
5482 */
5483 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5484 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
5485 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5486
5487 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5488 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
5489 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5490 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
5491
5492 IEM_MC_FETCH_YREG_U256(u256Tmp, IEM_GET_MODRM_REG(pVCpu, bRm));
5493 IEM_MC_STORE_MEM_U256_NO_AC(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u256Tmp);
5494
5495 IEM_MC_ADVANCE_RIP_AND_FINISH();
5496 IEM_MC_END();
5497 }
5498}
5499
5500/* Opcode VEX.F2.0F 0x7f - invalid */
5501
5502
5503/* Opcode VEX.0F 0x80 - invalid */
5504/* Opcode VEX.0F 0x81 - invalid */
5505/* Opcode VEX.0F 0x82 - invalid */
5506/* Opcode VEX.0F 0x83 - invalid */
5507/* Opcode VEX.0F 0x84 - invalid */
5508/* Opcode VEX.0F 0x85 - invalid */
5509/* Opcode VEX.0F 0x86 - invalid */
5510/* Opcode VEX.0F 0x87 - invalid */
5511/* Opcode VEX.0F 0x88 - invalid */
5512/* Opcode VEX.0F 0x89 - invalid */
5513/* Opcode VEX.0F 0x8a - invalid */
5514/* Opcode VEX.0F 0x8b - invalid */
5515/* Opcode VEX.0F 0x8c - invalid */
5516/* Opcode VEX.0F 0x8d - invalid */
5517/* Opcode VEX.0F 0x8e - invalid */
5518/* Opcode VEX.0F 0x8f - invalid */
5519/* Opcode VEX.0F 0x90 - invalid */
5520/* Opcode VEX.0F 0x91 - invalid */
5521/* Opcode VEX.0F 0x92 - invalid */
5522/* Opcode VEX.0F 0x93 - invalid */
5523/* Opcode VEX.0F 0x94 - invalid */
5524/* Opcode VEX.0F 0x95 - invalid */
5525/* Opcode VEX.0F 0x96 - invalid */
5526/* Opcode VEX.0F 0x97 - invalid */
5527/* Opcode VEX.0F 0x98 - invalid */
5528/* Opcode VEX.0F 0x99 - invalid */
5529/* Opcode VEX.0F 0x9a - invalid */
5530/* Opcode VEX.0F 0x9b - invalid */
5531/* Opcode VEX.0F 0x9c - invalid */
5532/* Opcode VEX.0F 0x9d - invalid */
5533/* Opcode VEX.0F 0x9e - invalid */
5534/* Opcode VEX.0F 0x9f - invalid */
5535/* Opcode VEX.0F 0xa0 - invalid */
5536/* Opcode VEX.0F 0xa1 - invalid */
5537/* Opcode VEX.0F 0xa2 - invalid */
5538/* Opcode VEX.0F 0xa3 - invalid */
5539/* Opcode VEX.0F 0xa4 - invalid */
5540/* Opcode VEX.0F 0xa5 - invalid */
5541/* Opcode VEX.0F 0xa6 - invalid */
5542/* Opcode VEX.0F 0xa7 - invalid */
5543/* Opcode VEX.0F 0xa8 - invalid */
5544/* Opcode VEX.0F 0xa9 - invalid */
5545/* Opcode VEX.0F 0xaa - invalid */
5546/* Opcode VEX.0F 0xab - invalid */
5547/* Opcode VEX.0F 0xac - invalid */
5548/* Opcode VEX.0F 0xad - invalid */
5549
5550
5551/* Opcode VEX.0F 0xae mem/0 - invalid. */
5552/* Opcode VEX.0F 0xae mem/1 - invalid. */
5553
5554/**
5555 * @ opmaps grp15
5556 * @ opcode !11/2
5557 * @ oppfx none
5558 * @ opcpuid sse
5559 * @ opgroup og_sse_mxcsrsm
5560 * @ opxcpttype 5
5561 * @ optest op1=0 -> mxcsr=0
5562 * @ optest op1=0x2083 -> mxcsr=0x2083
5563 * @ optest op1=0xfffffffe -> value.xcpt=0xd
5564 * @ optest op1=0x2083 cr0|=ts -> value.xcpt=0x7
5565 * @ optest op1=0x2083 cr0|=em -> value.xcpt=0x6
5566 * @ optest op1=0x2083 cr0|=mp -> mxcsr=0x2083
5567 * @ optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6
5568 * @ optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6
5569 * @ optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
5570 * @ optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
5571 * @ optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
5572 */
5573FNIEMOP_DEF_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm)
5574{
5575    IEMOP_MNEMONIC1(VEX_M_MEM, VLDMXCSR, vldmxcsr, Md_RO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
5576 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5577 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5578 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5579 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5580 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5581 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
5582 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_MxCsr), iemCImpl_vldmxcsr, iEffSeg, GCPtrEff);
5583 IEM_MC_END();
5584}
5585
5586
5587/**
5588 * @opmaps vexgrp15
5589 * @opcode !11/3
5590 * @oppfx none
5591 * @opcpuid avx
5592 * @opgroup og_avx_mxcsrsm
5593 * @opxcpttype 5
5594 * @optest mxcsr=0 -> op1=0
5595 * @optest mxcsr=0x2083 -> op1=0x2083
5596 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
5597 * @optest !amd / mxcsr=0x2085 cr0|=em -> op1=0x2085
5598 * @optest amd / mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
5599 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086
5600 * @optest mxcsr=0x2087 cr4&~=osfxsr -> op1=0x2087
5601 * @optest mxcsr=0x208f cr4&~=osxsave -> value.xcpt=0x6
5602 * @optest mxcsr=0x2087 cr4&~=osfxsr,osxsave -> value.xcpt=0x6
5603 * @optest !amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x7
5604 * @optest amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
5605 * @optest !amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> op1=0x2089
5606 * @optest amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
5607 * @optest !amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x7
5608 * @optest amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
5609 * @optest !amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x7
5610 * @optest amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
5611 * @optest !amd / mxcsr=0x208c xcr0&~=all_avx -> value.xcpt=0x6
5612 * @optest amd / mxcsr=0x208c xcr0&~=all_avx -> op1=0x208c
5613 * @optest !amd / mxcsr=0x208d xcr0&~=all_avx_sse -> value.xcpt=0x6
5614 * @optest amd / mxcsr=0x208d xcr0&~=all_avx_sse -> op1=0x208d
5615 * @optest !amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x6
5616 * @optest amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x7
5617 * @optest mxcsr=0x2082 cr0|=ts cr4&~=osxsave -> value.xcpt=0x6
5618 * @optest mxcsr=0x2081 xcr0&~=all_avx cr0|=ts cr4&~=osxsave
5619 * -> value.xcpt=0x6
5620 * @remarks AMD Jaguar CPU (f0x16,m0,s1) \#UD when CR0.EM is set. It also
5621 * doesn't seem to check XCR0[2:1] != 11b. This does not match the
5622 * APMv4 rev 3.17 page 509.
5623 * @todo Test this instruction on AMD Ryzen.
5624 */
5625FNIEMOP_DEF_1(iemOp_VGrp15_vstmxcsr, uint8_t, bRm)
5626{
5627 IEMOP_MNEMONIC1(VEX_M_MEM, VSTMXCSR, vstmxcsr, Md_WO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
5628 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5629 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
5630 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5631 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
5632 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
5633 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
5634 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_vstmxcsr, iEffSeg, GCPtrEff);
5635 IEM_MC_END();
5636}
5637
5638/* Opcode VEX.0F 0xae mem/4 - invalid. */
5639/* Opcode VEX.0F 0xae mem/5 - invalid. */
5640/* Opcode VEX.0F 0xae mem/6 - invalid. */
5641/* Opcode VEX.0F 0xae mem/7 - invalid. */
5642
5643/* Opcode VEX.0F 0xae 11b/0 - invalid. */
5644/* Opcode VEX.0F 0xae 11b/1 - invalid. */
5645/* Opcode VEX.0F 0xae 11b/2 - invalid. */
5646/* Opcode VEX.0F 0xae 11b/3 - invalid. */
5647/* Opcode VEX.0F 0xae 11b/4 - invalid. */
5648/* Opcode VEX.0F 0xae 11b/5 - invalid. */
5649/* Opcode VEX.0F 0xae 11b/6 - invalid. */
5650/* Opcode VEX.0F 0xae 11b/7 - invalid. */
5651
5652/**
5653 * Vex group 15 jump table for memory variant.
5654 */
5655IEM_STATIC const PFNIEMOPRM g_apfnVexGroup15MemReg[] =
5656{ /* pfx: none, 066h, 0f3h, 0f2h */
5657 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5658 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5659 /* /2 */ iemOp_VGrp15_vldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5660 /* /3 */ iemOp_VGrp15_vstmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5661 /* /4 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5662 /* /5 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5663 /* /6 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5664 /* /7 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,
5665};
5666AssertCompile(RT_ELEMENTS(g_apfnVexGroup15MemReg) == 8*4);
5667
5668
5669/** Opcode VEX.0F 0xae. */
5670FNIEMOP_DEF(iemOp_VGrp15)
5671{
5672 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5673 if (IEM_IS_MODRM_REG_MODE(bRm))
5674 /* register, register */
5675 return FNIEMOP_CALL_1(iemOp_InvalidWithRM, bRm);
5676
5677 /* memory, register */
5678 return FNIEMOP_CALL_1(g_apfnVexGroup15MemReg[ IEM_GET_MODRM_REG_8(bRm) * 4
5679 + pVCpu->iem.s.idxPrefix], bRm);
5680}
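/* Dispatch illustration (not part of the original source): C5 F8 AE 10 is
   vldmxcsr [rax]: no SIMD prefix (idxPrefix=0) and ModR/M reg=2 give table
   index 2*4 + 0 = 8, the iemOp_VGrp15_vldmxcsr entry. */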
5681
5682
5683/* Opcode VEX.0F 0xaf - invalid. */
5684
5685/* Opcode VEX.0F 0xb0 - invalid. */
5686/* Opcode VEX.0F 0xb1 - invalid. */
5687/* Opcode VEX.0F 0xb2 - invalid. */
5689/* Opcode VEX.0F 0xb3 - invalid. */
5690/* Opcode VEX.0F 0xb4 - invalid. */
5691/* Opcode VEX.0F 0xb5 - invalid. */
5692/* Opcode VEX.0F 0xb6 - invalid. */
5693/* Opcode VEX.0F 0xb7 - invalid. */
5694/* Opcode VEX.0F 0xb8 - invalid. */
5695/* Opcode VEX.0F 0xb9 - invalid. */
5696/* Opcode VEX.0F 0xba - invalid. */
5697/* Opcode VEX.0F 0xbb - invalid. */
5698/* Opcode VEX.0F 0xbc - invalid. */
5699/* Opcode VEX.0F 0xbd - invalid. */
5700/* Opcode VEX.0F 0xbe - invalid. */
5701/* Opcode VEX.0F 0xbf - invalid. */
5702
5703/* Opcode VEX.0F 0xc0 - invalid. */
5704/* Opcode VEX.66.0F 0xc0 - invalid. */
5705/* Opcode VEX.F3.0F 0xc0 - invalid. */
5706/* Opcode VEX.F2.0F 0xc0 - invalid. */
5707
5708/* Opcode VEX.0F 0xc1 - invalid. */
5709/* Opcode VEX.66.0F 0xc1 - invalid. */
5710/* Opcode VEX.F3.0F 0xc1 - invalid. */
5711/* Opcode VEX.F2.0F 0xc1 - invalid. */
5712
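/**
 * Common body for vcmpps/vcmppd (VEX.0F / VEX.66.0F 0xc2): compares the
 * packed elements of the two sources using the predicate in imm8 and writes
 * an all-ones/all-zeroes mask per element to the destination.  E.g.
 * vcmpps xmm0, xmm1, xmm2, 0 sets each dword of xmm0 to ffffffffh where the
 * corresponding singles of xmm1 and xmm2 compare equal, and to 0 otherwise.
 */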
5713#define IEMOP_VCMPP_BODY(a_Instr) \
5714 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
5715 if (IEM_IS_MODRM_REG_MODE(bRm)) \
5716 { \
5717 /* \
5718 * Register, Register. \
5719 */ \
5720 if (pVCpu->iem.s.uVexLength) \
5721 { \
5722 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
5723 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
5724 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
5725 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
5726 IEM_MC_PREPARE_AVX_USAGE(); \
5727 IEM_MC_LOCAL(X86YMMREG, uDst); \
5728 IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0); \
5729 IEM_MC_LOCAL(IEMMEDIAF2YMMSRC, uSrc); \
5730 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2YMMSRC, puSrc, uSrc, 1); \
5731            IEM_MC_FETCH_YREG_PAIR_YMM(uSrc, IEM_GET_EFFECTIVE_VVVV(pVCpu), IEM_GET_MODRM_RM(pVCpu, bRm)); \
5732 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2); \
5733 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
5734 RT_CONCAT3(iemAImpl_,a_Instr,_u256), \
5735 RT_CONCAT3(iemAImpl_,a_Instr,_u256_fallback)), \
5736 puDst, puSrc, bImmArg); \
5737 IEM_MC_STORE_YREG_YMM_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
5738 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5739 IEM_MC_END(); \
5740 } \
5741 else \
5742 { \
5743 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
5744 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
5745 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
5746 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
5747 IEM_MC_PREPARE_AVX_USAGE(); \
5748 IEM_MC_LOCAL(X86XMMREG, uDst); \
5749 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0); \
5750 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc); \
5751 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1); \
5752            IEM_MC_FETCH_XREG_PAIR_XMM(uSrc, IEM_GET_EFFECTIVE_VVVV(pVCpu), IEM_GET_MODRM_RM(pVCpu, bRm)); \
5753 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2); \
5754 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
5755 RT_CONCAT3(iemAImpl_,a_Instr,_u128), \
5756 RT_CONCAT3(iemAImpl_,a_Instr,_u128_fallback)), \
5757 puDst, puSrc, bImmArg); \
5758            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm)); \
5759 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5760 IEM_MC_END(); \
5761 } \
5762 } \
5763 else \
5764 { \
5765 /* \
5766 * Register, Memory. \
5767 */ \
5768 if (pVCpu->iem.s.uVexLength) \
5769 { \
5770 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
5771 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5772 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
5773 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
5774 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
5775 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
5776 IEM_MC_PREPARE_AVX_USAGE(); \
5777 IEM_MC_LOCAL(IEMMEDIAF2YMMSRC, uSrc); \
5778 IEM_MC_LOCAL(X86YMMREG, uDst); \
5779 IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0); \
5780 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2YMMSRC, puSrc, uSrc, 1); \
5781 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2); \
5782            IEM_MC_FETCH_MEM_YMM_ALIGN_AVX_AND_YREG_YMM(uSrc, IEM_GET_EFFECTIVE_VVVV(pVCpu), pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5783 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
5784 RT_CONCAT3(iemAImpl_,a_Instr,_u256), \
5785 RT_CONCAT3(iemAImpl_,a_Instr,_u256_fallback)), \
5786 puDst, puSrc, bImmArg); \
5787 IEM_MC_STORE_YREG_YMM_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
5788 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5789 IEM_MC_END(); \
5790 } \
5791 else \
5792 { \
5793 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
5794 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
5795 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
5796 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
5797 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
5798 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
5799 IEM_MC_PREPARE_AVX_USAGE(); \
5800 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc); \
5801 IEM_MC_LOCAL(X86XMMREG, uDst); \
5802 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0); \
5803 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1); \
5804 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2); \
5805            IEM_MC_FETCH_MEM_XMM_ALIGN_SSE_AND_XREG_XMM(uSrc, IEM_GET_EFFECTIVE_VVVV(pVCpu), pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
5806 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, \
5807 RT_CONCAT3(iemAImpl_,a_Instr,_u128), \
5808 RT_CONCAT3(iemAImpl_,a_Instr,_u128_fallback)), \
5809 puDst, puSrc, bImmArg); \
5810            IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm)); \
5811 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5812 IEM_MC_END(); \
5813 } \
5814 } \
5815 (void)0
5816
5817
5818/** Opcode VEX.0F 0xc2 - vcmpps Vps,Hps,Wps,Ib */
5819FNIEMOP_DEF(iemOp_vcmpps_Vps_Hps_Wps_Ib)
5820{
5821 IEMOP_MNEMONIC4(VEX_RVMI, VCMPPS, vcmpps, Vps, Hps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5822 IEMOP_VCMPP_BODY(vcmpps);
5823}
5824
5825
5826/** Opcode VEX.66.0F 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
5827FNIEMOP_DEF(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib)
5828{
5829 IEMOP_MNEMONIC4(VEX_RVMI, VCMPPD, vcmppd, Vpd, Hpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
5830 IEMOP_VCMPP_BODY(vcmppd);
5831}
5832
5833
5834/** Opcode VEX.F3.0F 0xc2 - vcmpss Vss,Hss,Wss,Ib */
5835FNIEMOP_DEF(iemOp_vcmpss_Vss_Hss_Wss_Ib)
5836{
5837    IEMOP_MNEMONIC4(VEX_RVMI, VCMPSS, vcmpss, Vss, Hss, Wss, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_VEX_L_IGNORED | IEMOPHINT_IGNORES_REXW);
5838
5839 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5840 if (IEM_IS_MODRM_REG_MODE(bRm))
5841 {
5842 /*
5843 * XMM32, XMM32.
5844 */
5845 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5846 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
5847 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
5848 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5849 IEM_MC_PREPARE_AVX_USAGE();
5850 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc);
5851 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1);
5852        IEM_MC_FETCH_XREG_PAIR_XMM(uSrc, IEM_GET_EFFECTIVE_VVVV(pVCpu), IEM_GET_MODRM_RM(pVCpu, bRm));
5853 IEM_MC_LOCAL(X86XMMREG, uDst);
5854 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
5855 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
5856 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcmpss_u128, iemAImpl_vcmpss_u128_fallback),
5857 puDst, puSrc, bImmArg);
5858        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
5859
5860 IEM_MC_ADVANCE_RIP_AND_FINISH();
5861 IEM_MC_END();
5862 }
5863 else
5864 {
5865 /*
5866 * XMM32, [mem32].
5867 */
5868 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5869
5870 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5871 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
5872 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
5873 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
5874        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5875 IEM_MC_PREPARE_AVX_USAGE();
5876
5877 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc);
5878 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1);
5879        IEM_MC_FETCH_MEM_XMM_U32_AND_XREG_XMM(uSrc, IEM_GET_EFFECTIVE_VVVV(pVCpu),
5880 0 /*a_iDword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5881 IEM_MC_LOCAL(X86XMMREG, uDst);
5882 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
5883 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
5884 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcmpss_u128, iemAImpl_vcmpss_u128_fallback),
5885 puDst, puSrc, bImmArg);
5886        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
5887
5888 IEM_MC_ADVANCE_RIP_AND_FINISH();
5889 IEM_MC_END();
5890 }
5891}
5892
5893
5894/** Opcode VEX.F2.0F 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
5895FNIEMOP_DEF(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib)
5896{
5897    IEMOP_MNEMONIC4(VEX_RVMI, VCMPSD, vcmpsd, Vsd, Hsd, Wsd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_VEX_L_IGNORED | IEMOPHINT_IGNORES_REXW);
5898
5899 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5900 if (IEM_IS_MODRM_REG_MODE(bRm))
5901 {
5902 /*
5903 * XMM64, XMM64.
5904 */
5905 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5906 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
5907 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
5908 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5909 IEM_MC_PREPARE_AVX_USAGE();
5910 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc);
5911 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1);
5912        IEM_MC_FETCH_XREG_PAIR_XMM(uSrc, IEM_GET_EFFECTIVE_VVVV(pVCpu), IEM_GET_MODRM_RM(pVCpu, bRm));
5913 IEM_MC_LOCAL(X86XMMREG, uDst);
5914 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
5915 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
5916 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcmpsd_u128, iemAImpl_vcmpsd_u128_fallback),
5917 puDst, puSrc, bImmArg);
5918        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
5919
5920 IEM_MC_ADVANCE_RIP_AND_FINISH();
5921 IEM_MC_END();
5922 }
5923 else
5924 {
5925 /*
5926 * XMM64, [mem64].
5927 */
5928 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5929
5930 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5931 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
5932 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
5933 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx);
5934        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5935 IEM_MC_PREPARE_AVX_USAGE();
5936
5937 IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, uSrc);
5938 IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, puSrc, uSrc, 1);
5939        IEM_MC_FETCH_MEM_XMM_U64_AND_XREG_XMM(uSrc, IEM_GET_EFFECTIVE_VVVV(pVCpu),
5940 0 /*a_iQword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
5941 IEM_MC_LOCAL(X86XMMREG, uDst);
5942 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
5943 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
5944 IEM_MC_CALL_AVX_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAvx, iemAImpl_vcmpsd_u128, iemAImpl_vcmpsd_u128_fallback),
5945 puDst, puSrc, bImmArg);
5946        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
        IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
5947
5948 IEM_MC_ADVANCE_RIP_AND_FINISH();
5949 IEM_MC_END();
5950 }
5951}
5952
5953
5954/* Opcode VEX.0F 0xc3 - invalid */
5955/* Opcode VEX.66.0F 0xc3 - invalid */
5956/* Opcode VEX.F3.0F 0xc3 - invalid */
5957/* Opcode VEX.F2.0F 0xc3 - invalid */
5958
5959/* Opcode VEX.0F 0xc4 - invalid */
5960
5961
5962/** Opcode VEX.66.0F 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
5963FNIEMOP_DEF(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib)
5964{
5965 /*IEMOP_MNEMONIC4(VEX_RMV, VPINSRW, vpinsrw, Vdq, Vdq, Ey, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);*/ /** @todo */
5966 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5967 if (IEM_IS_MODRM_REG_MODE(bRm))
5968 {
5969 /*
5970 * Register, register.
5971 */
5972 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5973 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
5974 IEM_MC_LOCAL(RTUINT128U, uSrc1);
5975 IEM_MC_LOCAL(uint16_t, uValue);
5976
5977 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
5978 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
5979 IEM_MC_PREPARE_AVX_USAGE();
5980
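        /* Copy the first source operand, then overwrite the word selected by imm8[2:0]. */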
5981 IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
5982 IEM_MC_FETCH_GREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm));
5983 IEM_MC_STORE_XREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
5984        IEM_MC_STORE_XREG_U16(  IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7 /*a_iWord*/, uValue);
        IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
5985 IEM_MC_ADVANCE_RIP_AND_FINISH();
5986 IEM_MC_END();
5987 }
5988 else
5989 {
5990 /*
5991 * Register, memory.
5992 */
5993 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
5994 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5995 IEM_MC_LOCAL(RTUINT128U, uSrc1);
5996 IEM_MC_LOCAL(uint16_t, uValue);
5997
5998 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
5999 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6000 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
6001 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6002 IEM_MC_PREPARE_AVX_USAGE();
6003
6004 IEM_MC_FETCH_XREG_U128(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
6005 IEM_MC_FETCH_MEM_U16(uValue, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6006 IEM_MC_STORE_XREG_U128( IEM_GET_MODRM_REG(pVCpu, bRm), uSrc1);
6007        IEM_MC_STORE_XREG_U16(  IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7 /*a_iWord*/, uValue);
        IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
6008 IEM_MC_ADVANCE_RIP_AND_FINISH();
6009 IEM_MC_END();
6010 }
6011}
6012
6013
6014/* Opcode VEX.F3.0F 0xc4 - invalid */
6015/* Opcode VEX.F2.0F 0xc4 - invalid */
6016
6017/* Opcode VEX.0F 0xc5 - invalid */
6018
6019
6020/** Opcode VEX.66.0F 0xc5 - vpextrw Gd, Udq, Ib */
6021FNIEMOP_DEF(iemOp_vpextrw_Gd_Udq_Ib)
6022{
6023 IEMOP_MNEMONIC3(VEX_RMI_REG, VPEXTRW, vpextrw, Gd, Ux, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
6024 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6025 if (IEM_IS_MODRM_REG_MODE(bRm))
6026 {
6027 /*
6028 * greg32, XMM, imm8.
6029 */
6030 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6031 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6032 IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx);
6033 IEM_MC_LOCAL(uint16_t, uValue);
6034 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6035 IEM_MC_PREPARE_AVX_USAGE();
6036        IEM_MC_FETCH_XREG_U16(uValue, IEM_GET_MODRM_RM(pVCpu, bRm), bImm & 7 /*a_iWord*/);
6037 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uValue);
6038 IEM_MC_ADVANCE_RIP_AND_FINISH();
6039 IEM_MC_END();
6040 }
6041 /* No memory operand. */
6042 else
6043 IEMOP_RAISE_INVALID_OPCODE_RET();
6044}
6045
6046
6047/* Opcode VEX.F3.0F 0xc5 - invalid */
6048/* Opcode VEX.F2.0F 0xc5 - invalid */
6049
6050
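/**
 * Common body for vshufps/vshufpd (VEX.0F / VEX.66.0F 0xc6), expanded with
 * the instruction name so the matching assembly/fallback worker pair is
 * selected for the 128-bit and 256-bit forms.  For vshufps, imm8 steers the
 * selection: the low two result dwords of each lane come from the first
 * source and the high two from the second.
 */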
6051#define VSHUFP_X(a_Instr) \
6052 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
6053 if (IEM_IS_MODRM_REG_MODE(bRm)) \
6054 { \
6055 /* \
6056 * Register, register. \
6057 */ \
6058 if (pVCpu->iem.s.uVexLength) \
6059 { \
6060 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
6061 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
6062 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2); \
6063 IEM_MC_LOCAL(RTUINT256U, uDst); \
6064 IEM_MC_LOCAL(RTUINT256U, uSrc1); \
6065 IEM_MC_LOCAL(RTUINT256U, uSrc2); \
6066 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
6067 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1); \
6068 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2); \
6069 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
6070 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
6071 IEM_MC_PREPARE_AVX_USAGE(); \
6072 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
6073 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
6074 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
6075 iemAImpl_ ## a_Instr ## _u256_fallback), puDst, puSrc1, puSrc2, bImmArg); \
6076 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
6077 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
6078 IEM_MC_END(); \
6079 } \
6080 else \
6081 { \
6082 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
6083 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
6084 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
6085 IEM_MC_ARG(PRTUINT128U, puDst, 0); \
6086 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1); \
6087 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2); \
6088 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
6089 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
6090 IEM_MC_PREPARE_AVX_USAGE(); \
6091 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
6092 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
6093 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
6094 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
6095 iemAImpl_ ## a_Instr ## _u128_fallback), puDst, puSrc1, puSrc2, bImmArg); \
6096 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
6097 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
6098 IEM_MC_END(); \
6099 } \
6100 } \
6101 else \
6102 { \
6103 /* \
6104 * Register, memory. \
6105 */ \
6106 if (pVCpu->iem.s.uVexLength) \
6107 { \
6108 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
6109 IEM_MC_LOCAL(RTUINT256U, uDst); \
6110 IEM_MC_LOCAL(RTUINT256U, uSrc1); \
6111 IEM_MC_LOCAL(RTUINT256U, uSrc2); \
6112 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
6113 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); \
6114 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1); \
6115 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2); \
6116 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
6117 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
6118 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
6119 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx2); \
6120 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
6121 IEM_MC_PREPARE_AVX_USAGE(); \
6122 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
6123 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
6124 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u256, \
6125 iemAImpl_ ## a_Instr ## _u256_fallback), puDst, puSrc1, puSrc2, bImmArg); \
6126 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst); \
6127 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
6128 IEM_MC_END(); \
6129 } \
6130 else \
6131 { \
6132 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
6133 IEM_MC_LOCAL(RTUINT128U, uSrc2); \
6134 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
6135 IEM_MC_ARG(PRTUINT128U, puDst, 0); \
6136 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1); \
6137 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2); \
6138 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
6139 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); \
6140 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); \
6141 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); \
6142 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); \
6143 IEM_MC_PREPARE_AVX_USAGE(); \
6144 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
6145 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
6146 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
6147 IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \
6148 iemAImpl_ ## a_Instr ## _u128_fallback), puDst, puSrc1, puSrc2, bImmArg); \
6149 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); \
6150 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
6151 IEM_MC_END(); \
6152 } \
6153 } \
6154 (void)0
6155
6156/** Opcode VEX.0F 0xc6 - vshufps Vps,Hps,Wps,Ib */
6157FNIEMOP_DEF(iemOp_vshufps_Vps_Hps_Wps_Ib)
6158{
6159    IEMOP_MNEMONIC4(VEX_RMI, VSHUFPS, vshufps, Vps, Hps, Wps, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_SKIP_PYTHON); /** @todo */
6160 VSHUFP_X(vshufps);
6161}
6162
6163
6164/** Opcode VEX.66.0F 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
6165FNIEMOP_DEF(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib)
6166{
6167 IEMOP_MNEMONIC4(VEX_RMI, VSHUFPD, vshufpd, Vpd, Hpd, Wpd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_SKIP_PYTHON); /** @todo */
6168 VSHUFP_X(vshufpd);
6169}
6170#undef VSHUFP_X
6171
6172
6173/* Opcode VEX.F3.0F 0xc6 - invalid */
6174/* Opcode VEX.F2.0F 0xc6 - invalid */
6175
6176/* Opcode VEX.0F 0xc7 - invalid */
6177/* Opcode VEX.66.0F 0xc7 - invalid */
6178/* Opcode VEX.F3.0F 0xc7 - invalid */
6179/* Opcode VEX.F2.0F 0xc7 - invalid */
6180
6181/* Opcode VEX.0F 0xc8 - invalid */
6182/* Opcode VEX.0F 0xc9 - invalid */
6183/* Opcode VEX.0F 0xca - invalid */
6184/* Opcode VEX.0F 0xcb - invalid */
6185/* Opcode VEX.0F 0xcc - invalid */
6186/* Opcode VEX.0F 0xcd - invalid */
6187/* Opcode VEX.0F 0xce - invalid */
6188/* Opcode VEX.0F 0xcf - invalid */
6189
6190
6191/* Opcode VEX.0F 0xd0 - invalid */
6192
6193
6194/** Opcode VEX.66.0F 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
6195FNIEMOP_DEF(iemOp_vaddsubpd_Vpd_Hpd_Wpd)
6196{
6197 IEMOP_MNEMONIC3(VEX_RVM, VADDSUBPD, vaddsubpd, Vpd, Hpd, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6198 IEMOPMEDIAF3_INIT_VARS( vaddsubpd);
6199 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
6200}
6201
6202
6203/* Opcode VEX.F3.0F 0xd0 - invalid */
6204
6205
6206/** Opcode VEX.F2.0F 0xd0 - vaddsubps Vps, Hps, Wps */
6207FNIEMOP_DEF(iemOp_vaddsubps_Vps_Hps_Wps)
6208{
6209 IEMOP_MNEMONIC3(VEX_RVM, VADDSUBPS, vaddsubps, Vps, Hps, Wps, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6210 IEMOPMEDIAF3_INIT_VARS( vaddsubps);
6211 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
6212}
6213
6214
6215/* Opcode VEX.0F 0xd1 - invalid */
6216
6217
6218/** Opcode VEX.66.0F 0xd1 - vpsrlw Vx, Hx, Wx */
6219FNIEMOP_DEF(iemOp_vpsrlw_Vx_Hx_W)
6220{
6221 IEMOP_MNEMONIC3(VEX_RVM, VPSRLW, vpsrlw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6222 IEMOPMEDIAOPTF3_INIT_VARS(vpsrlw);
6223 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6224}
6225
6226/* Opcode VEX.F3.0F 0xd1 - invalid */
6227/* Opcode VEX.F2.0F 0xd1 - invalid */
6228
6229/* Opcode VEX.0F 0xd2 - invalid */
6230/** Opcode VEX.66.0F 0xd2 - vpsrld Vx, Hx, Wx */
6231FNIEMOP_DEF(iemOp_vpsrld_Vx_Hx_Wx)
6232{
6233 IEMOP_MNEMONIC3(VEX_RVM, VPSRLD, vpsrld, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6234 IEMOPMEDIAOPTF3_INIT_VARS(vpsrld);
6235 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6236}
6237
6238/* Opcode VEX.F3.0F 0xd2 - invalid */
6239/* Opcode VEX.F2.0F 0xd2 - invalid */
6240
6241/* Opcode VEX.0F 0xd3 - invalid */
6242/** Opcode VEX.66.0F 0xd3 - vpsrlq Vx, Hx, Wx */
6243FNIEMOP_DEF(iemOp_vpsrlq_Vx_Hx_Wx)
6244{
6245 IEMOP_MNEMONIC3(VEX_RVM, VPSRLQ, vpsrlq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6246 IEMOPMEDIAOPTF3_INIT_VARS(vpsrlq);
6247 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6248}
6249
6250/* Opcode VEX.F3.0F 0xd3 - invalid */
6251/* Opcode VEX.F2.0F 0xd3 - invalid */
6252
6253/* Opcode VEX.0F 0xd4 - invalid */
6254
6255
6256/** Opcode VEX.66.0F 0xd4 - vpaddq Vx, Hx, Wx */
6257FNIEMOP_DEF(iemOp_vpaddq_Vx_Hx_Wx)
6258{
6259 IEMOP_MNEMONIC3(VEX_RVM, VPADDQ, vpaddq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6260 IEMOPMEDIAOPTF3_INIT_VARS( vpaddq);
6261 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6262}
6263
6264
6265/* Opcode VEX.F3.0F 0xd4 - invalid */
6266/* Opcode VEX.F2.0F 0xd4 - invalid */
6267
6268/* Opcode VEX.0F 0xd5 - invalid */
6269
6270
6271/** Opcode VEX.66.0F 0xd5 - vpmullw Vx, Hx, Wx */
6272FNIEMOP_DEF(iemOp_vpmullw_Vx_Hx_Wx)
6273{
6274 IEMOP_MNEMONIC3(VEX_RVM, VPMULLW, vpmullw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6275 IEMOPMEDIAOPTF3_INIT_VARS(vpmullw);
6276 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6277}
6278
6279
6280/* Opcode VEX.F3.0F 0xd5 - invalid */
6281/* Opcode VEX.F2.0F 0xd5 - invalid */
6282
6283/* Opcode VEX.0F 0xd6 - invalid */
6284
6285/**
6286 * @opcode 0xd6
6287 * @oppfx 0x66
6288 * @opcpuid avx
6289 * @opgroup og_avx_pcksclr_datamove
6290 * @opxcpttype none
6291 * @optest op1=-1 op2=2 -> op1=2
6292 * @optest op1=0 op2=-42 -> op1=-42
6293 */
6294FNIEMOP_DEF(iemOp_vmovq_Wq_Vq)
6295{
6296 IEMOP_MNEMONIC2(VEX_MR, VMOVQ, vmovq, Wq_WO, Vq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_VEX_L_ZERO);
6297 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6298 if (IEM_IS_MODRM_REG_MODE(bRm))
6299 {
6300 /*
6301 * Register, register.
6302 */
6303 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6304 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
6305
6306 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6307 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
6308
6309 IEM_MC_COPY_YREG_U64_ZX_VLMAX(IEM_GET_MODRM_RM(pVCpu, bRm),
6310 IEM_GET_MODRM_REG(pVCpu, bRm));
6311 IEM_MC_ADVANCE_RIP_AND_FINISH();
6312 IEM_MC_END();
6313 }
6314 else
6315 {
6316 /*
6317 * Memory, register.
6318 */
6319 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6320 IEM_MC_LOCAL(uint64_t, uSrc);
6321 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6322
6323 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6324 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
6325 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6326 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
6327
6328 IEM_MC_FETCH_YREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQWord*/);
6329 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
6330
6331 IEM_MC_ADVANCE_RIP_AND_FINISH();
6332 IEM_MC_END();
6333 }
6334}
6335
6336/* Opcode VEX.F3.0F 0xd6 - invalid */
6337/* Opcode VEX.F2.0F 0xd6 - invalid */
6338
6339
6340/* Opcode VEX.0F 0xd7 - invalid */
6341
6342/** Opcode VEX.66.0F 0xd7 - vpmovmskb Gd, Ux */
6343FNIEMOP_DEF(iemOp_vpmovmskb_Gd_Ux)
6344{
6345 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6346    /* Docs say register only. */
6347 if (IEM_IS_MODRM_REG_MODE(bRm)) /** @todo test that this is registers only. */
6348 {
6349        /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */
6350        IEMOP_MNEMONIC2(VEX_RM_REG, VPMOVMSKB, vpmovmskb, Gd, Ux, DISOPTYPE_X86_AVX | DISOPTYPE_HARMLESS, 0);
6351 if (pVCpu->iem.s.uVexLength)
6352 {
6353 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6354 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
6355 IEM_MC_ARG(uint64_t *, puDst, 0);
6356 IEM_MC_LOCAL(RTUINT256U, uSrc);
6357 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc, uSrc, 1);
6358 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6359 IEM_MC_PREPARE_AVX_USAGE();
6360 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6361 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
6362 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_vpmovmskb_u256,
6363 iemAImpl_vpmovmskb_u256_fallback), puDst, puSrc);
6364 IEM_MC_ADVANCE_RIP_AND_FINISH();
6365 IEM_MC_END();
6366 }
6367 else
6368 {
6369 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6370 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
6371 IEM_MC_ARG(uint64_t *, puDst, 0);
6372 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
6373 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6374 IEM_MC_PREPARE_AVX_USAGE();
6375 IEM_MC_REF_GREG_U64(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
6376 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
6377 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_pmovmskb_u128, puDst, puSrc);
6378 IEM_MC_ADVANCE_RIP_AND_FINISH();
6379 IEM_MC_END();
6380 }
6381 }
6382 else
6383 IEMOP_RAISE_INVALID_OPCODE_RET();
6384}
6385
6386
6387/* Opcode VEX.F3.0F 0xd7 - invalid */
6388/* Opcode VEX.F2.0F 0xd7 - invalid */
6389
6390
6391/* Opcode VEX.0F 0xd8 - invalid */
6392
6393/** Opcode VEX.66.0F 0xd8 - vpsubusb Vx, Hx, Wx */
6394FNIEMOP_DEF(iemOp_vpsubusb_Vx_Hx_Wx)
6395{
6396 IEMOP_MNEMONIC3(VEX_RVM, VPSUBUSB, vpsubusb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6397 IEMOPMEDIAOPTF3_INIT_VARS(vpsubusb);
6398 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6399}
6400
6401
6402/* Opcode VEX.F3.0F 0xd8 - invalid */
6403/* Opcode VEX.F2.0F 0xd8 - invalid */
6404
6405/* Opcode VEX.0F 0xd9 - invalid */
6406
6407
6408/** Opcode VEX.66.0F 0xd9 - vpsubusw Vx, Hx, Wx */
6409FNIEMOP_DEF(iemOp_vpsubusw_Vx_Hx_Wx)
6410{
6411 IEMOP_MNEMONIC3(VEX_RVM, VPSUBUSW, vpsubusw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6412 IEMOPMEDIAOPTF3_INIT_VARS(vpsubusw);
6413 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6414}
6415
6416
6417/* Opcode VEX.F3.0F 0xd9 - invalid */
6418/* Opcode VEX.F2.0F 0xd9 - invalid */
6419
6420/* Opcode VEX.0F 0xda - invalid */
6421
6422
6423/** Opcode VEX.66.0F 0xda - vpminub Vx, Hx, Wx */
6424FNIEMOP_DEF(iemOp_vpminub_Vx_Hx_Wx)
6425{
6426 IEMOP_MNEMONIC3(VEX_RVM, VPMINUB, vpminub, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6427 IEMOPMEDIAOPTF3_INIT_VARS(vpminub);
6428 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6429}
6430
6431
6432/* Opcode VEX.F3.0F 0xda - invalid */
6433/* Opcode VEX.F2.0F 0xda - invalid */
6434
6435/* Opcode VEX.0F 0xdb - invalid */
6436
6437
6438/** Opcode VEX.66.0F 0xdb - vpand Vx, Hx, Wx */
6439FNIEMOP_DEF(iemOp_vpand_Vx_Hx_Wx)
6440{
6441 IEMOP_MNEMONIC3(VEX_RVM, VPAND, vpand, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6442 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
6443 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpand, &g_iemAImpl_vpand_fallback));
6444}
6445
6446
6447/* Opcode VEX.F3.0F 0xdb - invalid */
6448/* Opcode VEX.F2.0F 0xdb - invalid */
6449
6450/* Opcode VEX.0F 0xdc - invalid */
6451
6452
6453/** Opcode VEX.66.0F 0xdc - vpaddusb Vx, Hx, Wx */
6454FNIEMOP_DEF(iemOp_vpaddusb_Vx_Hx_Wx)
6455{
6456 IEMOP_MNEMONIC3(VEX_RVM, VPADDUSB, vpaddusb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6457 IEMOPMEDIAOPTF3_INIT_VARS(vpaddusb);
6458 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6459}
6460
6461
6462/* Opcode VEX.F3.0F 0xdc - invalid */
6463/* Opcode VEX.F2.0F 0xdc - invalid */
6464
6465/* Opcode VEX.0F 0xdd - invalid */
6466
6467
6468/** Opcode VEX.66.0F 0xdd - vpaddusw Vx, Hx, Wx */
6469FNIEMOP_DEF(iemOp_vpaddusw_Vx_Hx_Wx)
6470{
6471 IEMOP_MNEMONIC3(VEX_RVM, VPADDUSW, vpaddusw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6472 IEMOPMEDIAOPTF3_INIT_VARS(vpaddusw);
6473 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6474}
6475
6476
6477/* Opcode VEX.F3.0F 0xdd - invalid */
6478/* Opcode VEX.F2.0F 0xdd - invalid */
6479
6480/* Opcode VEX.0F 0xde - invalid */
6481
6482
6483/** Opcode VEX.66.0F 0xde - vpmaxub Vx, Hx, Wx */
6484FNIEMOP_DEF(iemOp_vpmaxub_Vx_Hx_Wx)
6485{
6486 IEMOP_MNEMONIC3(VEX_RVM, VPMAXUB, vpmaxub, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6487 IEMOPMEDIAOPTF3_INIT_VARS(vpmaxub);
6488 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6489}
6490
6491
6492/* Opcode VEX.F3.0F 0xde - invalid */
6493/* Opcode VEX.F2.0F 0xde - invalid */
6494
6495/* Opcode VEX.0F 0xdf - invalid */
6496
6497
6498/** Opcode VEX.66.0F 0xdf - vpandn Vx, Hx, Wx */
6499FNIEMOP_DEF(iemOp_vpandn_Vx_Hx_Wx)
6500{
6501 IEMOP_MNEMONIC3(VEX_RVM, VPANDN, vpandn, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
6502 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
6503 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpandn, &g_iemAImpl_vpandn_fallback));
6504}
6505
6506
6507/* Opcode VEX.F3.0F 0xdf - invalid */
6508/* Opcode VEX.F2.0F 0xdf - invalid */
6509
6510/* Opcode VEX.0F 0xe0 - invalid */
6511
6512
6513/** Opcode VEX.66.0F 0xe0 - vpavgb Vx, Hx, Wx */
6514FNIEMOP_DEF(iemOp_vpavgb_Vx_Hx_Wx)
6515{
6516 IEMOP_MNEMONIC3(VEX_RVM, VPAVGB, vpavgb, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6517 IEMOPMEDIAOPTF3_INIT_VARS(vpavgb);
6518 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6519}
6520
6521
6522/* Opcode VEX.F3.0F 0xe0 - invalid */
6523/* Opcode VEX.F2.0F 0xe0 - invalid */
6524
6525/* Opcode VEX.0F 0xe1 - invalid */
6526/** Opcode VEX.66.0F 0xe1 - vpsraw Vx, Hx, Wx */
6527FNIEMOP_DEF(iemOp_vpsraw_Vx_Hx_W)
6528{
6529 IEMOP_MNEMONIC3(VEX_RVM, VPSRAW, vpsraw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6530 IEMOPMEDIAOPTF3_INIT_VARS(vpsraw);
6531 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6532}
6533
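/*
 * For the shift-by-register forms (vpsraw and vpsrad here, likewise the
 * vpsll and vpsrl variants) the count is not per element: every element of
 * Hx is shifted by the unsigned count taken from the low 64 bits of Wx.
 * For the arithmetic shifts a count above the element width replicates the
 * sign bit, conceptually per word lane:
 *      puDst->ai16[i] = puSrc1->ai16[i] >> RT_MIN(uCount, 15);
 */
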
6534/* Opcode VEX.F3.0F 0xe1 - invalid */
6535/* Opcode VEX.F2.0F 0xe1 - invalid */
6536
6537/* Opcode VEX.0F 0xe2 - invalid */
6538/** Opcode VEX.66.0F 0xe2 - vpsrad Vx, Hx, Wx */
6539FNIEMOP_DEF(iemOp_vpsrad_Vx_Hx_Wx)
6540{
6541 IEMOP_MNEMONIC3(VEX_RVM, VPSRAD, vpsrad, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6542 IEMOPMEDIAOPTF3_INIT_VARS(vpsrad);
6543 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6544}
6545
6546/* Opcode VEX.F3.0F 0xe2 - invalid */
6547/* Opcode VEX.F2.0F 0xe2 - invalid */
6548
6549/* Opcode VEX.0F 0xe3 - invalid */
6550
6551
6552/** Opcode VEX.66.0F 0xe3 - vpavgw Vx, Hx, Wx */
6553FNIEMOP_DEF(iemOp_vpavgw_Vx_Hx_Wx)
6554{
6555 IEMOP_MNEMONIC3(VEX_RVM, VPAVGW, vpavgw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6556 IEMOPMEDIAOPTF3_INIT_VARS(vpavgw);
6557 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6558}
6559
6560
6561/* Opcode VEX.F3.0F 0xe3 - invalid */
6562/* Opcode VEX.F2.0F 0xe3 - invalid */
6563
6564/* Opcode VEX.0F 0xe4 - invalid */
6565
6566
6567/** Opcode VEX.66.0F 0xe4 - vpmulhuw Vx, Hx, Wx */
6568FNIEMOP_DEF(iemOp_vpmulhuw_Vx_Hx_Wx)
6569{
6570 IEMOP_MNEMONIC3(VEX_RVM, VPMULHUW, vpmulhuw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6571 IEMOPMEDIAOPTF3_INIT_VARS(vpmulhuw);
6572 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6573}
6574
6575
6576/* Opcode VEX.F3.0F 0xe4 - invalid */
6577/* Opcode VEX.F2.0F 0xe4 - invalid */
6578
6579/* Opcode VEX.0F 0xe5 - invalid */
6580
6581
6582/** Opcode VEX.66.0F 0xe5 - vpmulhw Vx, Hx, Wx */
6583FNIEMOP_DEF(iemOp_vpmulhw_Vx_Hx_Wx)
6584{
6585 IEMOP_MNEMONIC3(VEX_RVM, VPMULHW, vpmulhw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6586 IEMOPMEDIAOPTF3_INIT_VARS(vpmulhw);
6587 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
6588}
6589
6590
6591/* Opcode VEX.F3.0F 0xe5 - invalid */
6592/* Opcode VEX.F2.0F 0xe5 - invalid */
6593
6594/* Opcode VEX.0F 0xe6 - invalid */
6595
6596
6597/** Opcode VEX.66.0F 0xe6 - vcvttpd2dq Vx, Wpd */
6598FNIEMOP_DEF(iemOp_vcvttpd2dq_Vx_Wpd)
6599{
6600 IEMOP_MNEMONIC2(VEX_RM, VCVTTPD2DQ, vcvttpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6601 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6602 if (IEM_IS_MODRM_REG_MODE(bRm))
6603 {
6604 /*
6605 * Register, register.
6606 */
6607 if (pVCpu->iem.s.uVexLength)
6608 {
6609 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6610 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
6611 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6612 IEM_MC_PREPARE_AVX_USAGE();
6613
6614 IEM_MC_LOCAL( X86YMMREG, uSrc);
6615 IEM_MC_FETCH_YREG_YMM(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
6616 IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc, uSrc, 1);
6617 IEM_MC_LOCAL( X86XMMREG, uDst);
6618 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
6619 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
6620 iemAImpl_vcvttpd2dq_u128_u256,
6621 iemAImpl_vcvttpd2dq_u128_u256_fallback),
6622 puDst, puSrc);
6623 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
6624 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
6625 IEM_MC_ADVANCE_RIP_AND_FINISH();
6626 IEM_MC_END();
6627 }
6628 else
6629 {
6630 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6631 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
6632 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6633 IEM_MC_PREPARE_AVX_USAGE();
6634
6635 IEM_MC_ARG( PCX86XMMREG, puSrc, 1);
6636 IEM_MC_REF_XREG_XMM_CONST( puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
6637 IEM_MC_LOCAL( X86XMMREG, uDst);
6638 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
6639 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
6640 iemAImpl_vcvttpd2dq_u128_u128,
6641 iemAImpl_vcvttpd2dq_u128_u128_fallback),
6642 puDst, puSrc);
6643 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
6644 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
6645 IEM_MC_ADVANCE_RIP_AND_FINISH();
6646 IEM_MC_END();
6647 }
6648 }
6649 else
6650 {
6651 /*
6652 * Register, memory.
6653 */
6654 if (pVCpu->iem.s.uVexLength)
6655 {
6656 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6657 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6658 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6659 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
6660 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6661 IEM_MC_PREPARE_AVX_USAGE();
6662
6663 IEM_MC_LOCAL( X86YMMREG, uSrc);
6664 IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc, uSrc, 1);
6665 IEM_MC_FETCH_MEM_YMM_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6666 IEM_MC_LOCAL( X86XMMREG, uDst);
6667 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
6668 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
6669 iemAImpl_vcvttpd2dq_u128_u256,
6670 iemAImpl_vcvttpd2dq_u128_u256_fallback),
6671 puDst, puSrc);
6672 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
6673 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
6674 IEM_MC_ADVANCE_RIP_AND_FINISH();
6675 IEM_MC_END();
6676 }
6677 else
6678 {
6679 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6680 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6681 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6682 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
6683 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6684 IEM_MC_PREPARE_AVX_USAGE();
6685
6686 IEM_MC_LOCAL(X86XMMREG, uSrc);
6687 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc, uSrc, 1);
6688 IEM_MC_FETCH_MEM_XMM_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6689 IEM_MC_LOCAL( X86XMMREG, uDst);
6690 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
6691 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
6692 iemAImpl_vcvttpd2dq_u128_u128,
6693 iemAImpl_vcvttpd2dq_u128_u128_fallback),
6694 puDst, puSrc);
6695 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
6696 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
6697 IEM_MC_ADVANCE_RIP_AND_FINISH();
6698 IEM_MC_END();
6699 }
6700 }
6701}
6702
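/*
 * vcvttpd2dq narrows, so both VEX.L forms produce only a 128-bit result:
 * four doubles (L=1) or two doubles (L=0) become packed int32, stored to an
 * XMM register with the upper YMM lane cleared.  The extra 't' means
 * truncation toward zero instead of MXCSR rounding; per in-range element,
 * conceptually:
 *      iDst = (int32_t)rdSrc;   // out-of-range values yield 0x80000000
 */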
6703
6704/** Opcode VEX.F3.0F 0xe6 - vcvtdq2pd Vpd, Wx */
6705FNIEMOP_DEF(iemOp_vcvtdq2pd_Vx_Wpd)
6706{
6707 IEMOP_MNEMONIC2(VEX_RM, VCVTDQ2PD, vcvtdq2pd, Vpd, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6708 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6709 if (IEM_IS_MODRM_REG_MODE(bRm))
6710 {
6711 /*
6712 * Register, register.
6713 */
6714 if (pVCpu->iem.s.uVexLength)
6715 {
6716 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6717 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
6718 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6719 IEM_MC_PREPARE_AVX_USAGE();
6720
6721 IEM_MC_ARG( PCX86XMMREG, puSrc, 1);
6722 IEM_MC_REF_XREG_XMM_CONST( puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
6723 IEM_MC_LOCAL( X86YMMREG, uDst);
6724 IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
6725 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
6726 iemAImpl_vcvtdq2pd_u256_u128,
6727 iemAImpl_vcvtdq2pd_u256_u128_fallback),
6728 puDst, puSrc);
6729 IEM_MC_STORE_YREG_YMM_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
6730 IEM_MC_ADVANCE_RIP_AND_FINISH();
6731 IEM_MC_END();
6732 }
6733 else
6734 {
6735 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6736 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
6737 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6738 IEM_MC_PREPARE_AVX_USAGE();
6739
6740 IEM_MC_ARG( const uint64_t *, pu64Src, 1);
6741 IEM_MC_REF_XREG_U64_CONST( pu64Src, IEM_GET_MODRM_RM(pVCpu, bRm));
6742 IEM_MC_LOCAL( X86XMMREG, uDst);
6743 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
6744 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
6745 iemAImpl_vcvtdq2pd_u128_u64,
6746 iemAImpl_vcvtdq2pd_u128_u64_fallback),
6747 puDst, pu64Src);
6748 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
6749 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
6750 IEM_MC_ADVANCE_RIP_AND_FINISH();
6751 IEM_MC_END();
6752 }
6753 }
6754 else
6755 {
6756 /*
6757 * Register, memory.
6758 */
6759 if (pVCpu->iem.s.uVexLength)
6760 {
6761 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6762 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6763 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6764 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
6765 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6766 IEM_MC_PREPARE_AVX_USAGE();
6767
6768 IEM_MC_LOCAL(X86XMMREG, uSrc);
6769 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc, uSrc, 1);
6770 IEM_MC_FETCH_MEM_XMM_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6771 IEM_MC_LOCAL(X86YMMREG, uDst);
6772 IEM_MC_ARG_LOCAL_REF(PX86YMMREG, puDst, uDst, 0);
6773 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
6774 iemAImpl_vcvtdq2pd_u256_u128,
6775 iemAImpl_vcvtdq2pd_u256_u128_fallback),
6776 puDst, puSrc);
6777 IEM_MC_STORE_YREG_YMM_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
6778 IEM_MC_ADVANCE_RIP_AND_FINISH();
6779 IEM_MC_END();
6780 }
6781 else
6782 {
6783 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6784 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6785 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6786 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
6787 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6788 IEM_MC_PREPARE_AVX_USAGE();
6789
6790 IEM_MC_LOCAL( uint64_t, u64Src);
6791 IEM_MC_ARG_LOCAL_REF(const uint64_t *, pu64Src, u64Src, 1);
6792 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6793 IEM_MC_LOCAL( X86XMMREG, uDst);
6794 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
6795 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
6796 iemAImpl_vcvtdq2pd_u128_u64,
6797 iemAImpl_vcvtdq2pd_u128_u64_fallback),
6798 puDst, pu64Src);
6799 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
6800 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
6801 IEM_MC_ADVANCE_RIP_AND_FINISH();
6802 IEM_MC_END();
6803 }
6804 }
6805}
6806
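/*
 * vcvtdq2pd widens, which is why the L=0 register and memory paths above
 * only read 64 bits (two int32 values) to fill a 128-bit result, while L=1
 * consumes a full 128-bit source for a 256-bit result.  The per-element
 * conversion is exact, as every int32 is representable as a double:
 *      rdDst = (double)iSrc;
 */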
6807
6808/** Opcode VEX.F2.0F 0xe6 - vcvtpd2dq Vx, Wpd */
6809FNIEMOP_DEF(iemOp_vcvtpd2dq_Vx_Wpd)
6810{
6811 IEMOP_MNEMONIC2(VEX_RM, VCVTPD2DQ, vcvtpd2dq, Vx, Wpd, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
6812 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6813 if (IEM_IS_MODRM_REG_MODE(bRm))
6814 {
6815 /*
6816 * Register, register.
6817 */
6818 if (pVCpu->iem.s.uVexLength)
6819 {
6820 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6821 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
6822 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6823 IEM_MC_PREPARE_AVX_USAGE();
6824
6825 IEM_MC_LOCAL( X86YMMREG, uSrc);
6826 IEM_MC_FETCH_YREG_YMM(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
6827 IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc, uSrc, 1);
6828 IEM_MC_LOCAL( X86XMMREG, uDst);
6829 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
6830 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
6831 iemAImpl_vcvtpd2dq_u128_u256,
6832 iemAImpl_vcvtpd2dq_u128_u256_fallback),
6833 puDst, puSrc);
6834 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
6835 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
6836 IEM_MC_ADVANCE_RIP_AND_FINISH();
6837 IEM_MC_END();
6838 }
6839 else
6840 {
6841 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6842 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
6843 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6844 IEM_MC_PREPARE_AVX_USAGE();
6845
6846 IEM_MC_ARG( PCX86XMMREG, puSrc, 1);
6847 IEM_MC_REF_XREG_XMM_CONST( puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
6848 IEM_MC_LOCAL( X86XMMREG, uDst);
6849 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
6850 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
6851 iemAImpl_vcvtpd2dq_u128_u128,
6852 iemAImpl_vcvtpd2dq_u128_u128_fallback),
6853 puDst, puSrc);
6854 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
6855 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
6856 IEM_MC_ADVANCE_RIP_AND_FINISH();
6857 IEM_MC_END();
6858 }
6859 }
6860 else
6861 {
6862 /*
6863 * Register, memory.
6864 */
6865 if (pVCpu->iem.s.uVexLength)
6866 {
6867 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6868 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6869 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6870 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
6871 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6872 IEM_MC_PREPARE_AVX_USAGE();
6873
6874 IEM_MC_LOCAL( X86YMMREG, uSrc);
6875 IEM_MC_ARG_LOCAL_REF(PCX86YMMREG, puSrc, uSrc, 1);
6876 IEM_MC_FETCH_MEM_YMM_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6877 IEM_MC_LOCAL( X86XMMREG, uDst);
6878 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
6879 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
6880 iemAImpl_vcvtpd2dq_u128_u256,
6881 iemAImpl_vcvtpd2dq_u128_u256_fallback),
6882 puDst, puSrc);
6883 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
6884 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
6885 IEM_MC_ADVANCE_RIP_AND_FINISH();
6886 IEM_MC_END();
6887 }
6888 else
6889 {
6890 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6891 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6892 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6893 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
6894 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6895 IEM_MC_PREPARE_AVX_USAGE();
6896
6897 IEM_MC_LOCAL(X86XMMREG, uSrc);
6898 IEM_MC_ARG_LOCAL_REF(PCX86XMMREG, puSrc, uSrc, 1);
6899 IEM_MC_FETCH_MEM_XMM_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6900 IEM_MC_LOCAL( X86XMMREG, uDst);
6901 IEM_MC_ARG_LOCAL_REF(PX86XMMREG, puDst, uDst, 0);
6902 IEM_MC_CALL_AVX_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx,
6903 iemAImpl_vcvtpd2dq_u128_u128,
6904 iemAImpl_vcvtpd2dq_u128_u128_fallback),
6905 puDst, puSrc);
6906 IEM_MC_STORE_XREG_XMM( IEM_GET_MODRM_REG(pVCpu, bRm), uDst);
6907 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm));
6908 IEM_MC_ADVANCE_RIP_AND_FINISH();
6909 IEM_MC_END();
6910 }
6911 }
6912}
6913
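/*
 * vcvtpd2dq is the MXCSR-rounding sibling of vcvttpd2dq above; apart from
 * honouring the current rounding mode instead of truncating, the two behave
 * identically, including the 128-bit result and upper lane clearing.
 */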
6914
6915/* Opcode VEX.0F 0xe7 - invalid */
6916
6917/**
6918 * @opcode 0xe7
6919 * @opcodesub !11 mr/reg
6920 * @oppfx 0x66
6921 * @opcpuid avx
6922 * @opgroup og_avx_cachect
6923 * @opxcpttype 1
6924 * @optest op1=-1 op2=2 -> op1=2
6925 * @optest op1=0 op2=-42 -> op1=-42
6926 */
6927FNIEMOP_DEF(iemOp_vmovntdq_Mx_Vx)
6928{
6929 IEMOP_MNEMONIC2(VEX_MR_MEM, VMOVNTDQ, vmovntdq, Mx_WO, Vx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
6930 Assert(pVCpu->iem.s.uVexLength <= 1);
6931 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6932 if (IEM_IS_MODRM_MEM_MODE(bRm))
6933 {
6934 if (pVCpu->iem.s.uVexLength == 0)
6935 {
6936 /*
6937 * 128-bit: Memory, register.
6938 */
6939 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6940 IEM_MC_LOCAL(RTUINT128U, uSrc);
6941 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6942
6943 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6944 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
6945 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6946 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
6947
6948 IEM_MC_FETCH_YREG_U128(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDQWord*/);
6949 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
6950
6951 IEM_MC_ADVANCE_RIP_AND_FINISH();
6952 IEM_MC_END();
6953 }
6954 else
6955 {
6956 /*
6957 * 256-bit: Memory, register.
6958 */
6959 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
6960 IEM_MC_LOCAL(RTUINT256U, uSrc);
6961 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6962
6963 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6964 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
6965 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
6966 IEM_MC_ACTUALIZE_AVX_STATE_FOR_READ();
6967
6968 IEM_MC_FETCH_YREG_U256(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
6969 IEM_MC_STORE_MEM_U256_ALIGN_AVX(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
6970
6971 IEM_MC_ADVANCE_RIP_AND_FINISH();
6972 IEM_MC_END();
6973 }
6974 }
6975 /**
6976 * @opdone
6977 * @opmnemonic udvex660fe7reg
6978 * @opcode 0xe7
6979 * @opcodesub 11 mr/reg
6980 * @oppfx 0x66
6981 * @opunused immediate
6982 * @opcpuid avx
6983 * @optest ->
6984 */
6985 else
6986 IEMOP_RAISE_INVALID_OPCODE_RET();
6987}
6988
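/*
 * vmovntdq is a non-temporal store hint; the _ALIGN_SSE/_ALIGN_AVX store
 * helpers above enforce the 16/32 byte operand alignment the instruction
 * requires, faulting on a misaligned address.  Typical guest usage, sketched:
 *      vmovntdq [rdi], ymm0    ; rdi must be 32-byte aligned
 */
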
6989/* Opcode VEX.F3.0F 0xe7 - invalid */
6990/* Opcode VEX.F2.0F 0xe7 - invalid */
6991
6992
6993/* Opcode VEX.0F 0xe8 - invalid */
6994
6995
6996/** Opcode VEX.66.0F 0xe8 - vpsubsb Vx, Hx, Wx */
6997FNIEMOP_DEF(iemOp_vpsubsb_Vx_Hx_Wx)
6998{
6999 IEMOP_MNEMONIC3(VEX_RVM, VPSUBSB, vpsubsb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
7000 IEMOPMEDIAOPTF3_INIT_VARS(vpsubsb);
7001 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
7002}
7003
7004
7005/* Opcode VEX.F3.0F 0xe8 - invalid */
7006/* Opcode VEX.F2.0F 0xe8 - invalid */
7007
7008/* Opcode VEX.0F 0xe9 - invalid */
7009
7010
7011/** Opcode VEX.66.0F 0xe9 - vpsubsw Vx, Hx, Wx */
7012FNIEMOP_DEF(iemOp_vpsubsw_Vx_Hx_Wx)
7013{
7014 IEMOP_MNEMONIC3(VEX_RVM, VPSUBSW, vpsubsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
7015 IEMOPMEDIAOPTF3_INIT_VARS(vpsubsw);
7016 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
7017}
7018
7019
7020/* Opcode VEX.F3.0F 0xe9 - invalid */
7021/* Opcode VEX.F2.0F 0xe9 - invalid */
7022
7023/* Opcode VEX.0F 0xea - invalid */
7024
7025
7026/** Opcode VEX.66.0F 0xea - vpminsw Vx, Hx, Wx */
7027FNIEMOP_DEF(iemOp_vpminsw_Vx_Hx_Wx)
7028{
7029 IEMOP_MNEMONIC3(VEX_RVM, VPMINSW, vpminsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
7030 IEMOPMEDIAOPTF3_INIT_VARS(vpminsw);
7031 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
7032}
7033
7034
7035/* Opcode VEX.F3.0F 0xea - invalid */
7036/* Opcode VEX.F2.0F 0xea - invalid */
7037
7038/* Opcode VEX.0F 0xeb - invalid */
7039
7040
7041/** Opcode VEX.66.0F 0xeb - vpor Vx, Hx, Wx */
7042FNIEMOP_DEF(iemOp_vpor_Vx_Hx_Wx)
7043{
7044 IEMOP_MNEMONIC3(VEX_RVM, VPOR, vpor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
7045 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
7046 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpor, &g_iemAImpl_vpor_fallback));
7047}
7048
7049
7050
7051/* Opcode VEX.F3.0F 0xeb - invalid */
7052/* Opcode VEX.F2.0F 0xeb - invalid */
7053
7054/* Opcode VEX.0F 0xec - invalid */
7055
7056
7057/** Opcode VEX.66.0F 0xec - vpaddsb Vx, Hx, Wx */
7058FNIEMOP_DEF(iemOp_vpaddsb_Vx_Hx_Wx)
7059{
7060 IEMOP_MNEMONIC3(VEX_RVM, VPADDSB, vpaddsb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
7061 IEMOPMEDIAOPTF3_INIT_VARS(vpaddsb);
7062 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
7063}
7064
7065
7066/* Opcode VEX.F3.0F 0xec - invalid */
7067/* Opcode VEX.F2.0F 0xec - invalid */
7068
7069/* Opcode VEX.0F 0xed - invalid */
7070
7071
7072/** Opcode VEX.66.0F 0xed - vpaddsw Vx, Hx, Wx */
7073FNIEMOP_DEF(iemOp_vpaddsw_Vx_Hx_Wx)
7074{
7075 IEMOP_MNEMONIC3(VEX_RVM, VPADDSW, vpaddsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
7076 IEMOPMEDIAOPTF3_INIT_VARS(vpaddsw);
7077 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
7078}
7079
7080
7081/* Opcode VEX.F3.0F 0xed - invalid */
7082/* Opcode VEX.F2.0F 0xed - invalid */
7083
7084/* Opcode VEX.0F 0xee - invalid */
7085
7086
7087/** Opcode VEX.66.0F 0xee - vpmaxsw Vx, Hx, Wx */
7088FNIEMOP_DEF(iemOp_vpmaxsw_Vx_Hx_Wx)
7089{
7090 IEMOP_MNEMONIC3(VEX_RVM, VPMAXSW, vpmaxsw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
7091 IEMOPMEDIAOPTF3_INIT_VARS(vpmaxsw);
7092 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
7093}
7094
7095
7096/* Opcode VEX.F3.0F 0xee - invalid */
7097/* Opcode VEX.F2.0F 0xee - invalid */
7098
7099
7100/* Opcode VEX.0F 0xef - invalid */
7101
7102
7103/** Opcode VEX.66.0F 0xef - vpxor Vx, Hx, Wx */
7104FNIEMOP_DEF(iemOp_vpxor_Vx_Hx_Wx)
7105{
7106 IEMOP_MNEMONIC3(VEX_RVM, VPXOR, vpxor, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
7107 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt,
7108 IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &g_iemAImpl_vpxor, &g_iemAImpl_vpxor_fallback));
7109}
7110
7111
7112/* Opcode VEX.F3.0F 0xef - invalid */
7113/* Opcode VEX.F2.0F 0xef - invalid */
7114
7115/* Opcode VEX.0F 0xf0 - invalid */
7116/* Opcode VEX.66.0F 0xf0 - invalid */
7117
7118
7119/** Opcode VEX.F2.0F 0xf0 - vlddqu Vx, Mx */
7120FNIEMOP_DEF(iemOp_vlddqu_Vx_Mx)
7121{
7122 IEMOP_MNEMONIC2(VEX_RM_MEM, VLDDQU, vlddqu, Vx_WO, Mx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES);
7123 Assert(pVCpu->iem.s.uVexLength <= 1);
7124 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7125 if (IEM_IS_MODRM_REG_MODE(bRm))
7126 {
7127 /*
7128 * Register, register - (not implemented, assuming it raises \#UD).
7129 */
7130 IEMOP_RAISE_INVALID_OPCODE_RET();
7131 }
7132 else if (pVCpu->iem.s.uVexLength == 0)
7133 {
7134 /*
7135 * Register, memory128.
7136 */
7137 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
7138 IEM_MC_LOCAL(RTUINT128U, u128Tmp);
7139 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7140
7141 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7142 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
7143 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
7144 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
7145
7146 IEM_MC_FETCH_MEM_U128_NO_AC(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7147 IEM_MC_STORE_YREG_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u128Tmp);
7148
7149 IEM_MC_ADVANCE_RIP_AND_FINISH();
7150 IEM_MC_END();
7151 }
7152 else
7153 {
7154 /*
7155 * Register, memory256.
7156 */
7157 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
7158 IEM_MC_LOCAL(RTUINT256U, u256Tmp);
7159 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7160
7161 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7162 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
7163 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
7164 IEM_MC_ACTUALIZE_AVX_STATE_FOR_CHANGE();
7165
7166 IEM_MC_FETCH_MEM_U256_NO_AC(u256Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7167 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), u256Tmp);
7168
7169 IEM_MC_ADVANCE_RIP_AND_FINISH();
7170 IEM_MC_END();
7171 }
7172}
7173
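/*
 * vlddqu is the alignment-tolerant load, hence the _NO_AC fetches above and
 * the register-source encoding being treated as invalid (\#UD).  E.g. this
 * is legal for any byte-granular rsi:
 *      vlddqu ymm0, [rsi+1]
 */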
7174
7175/* Opcode VEX.0F 0xf1 - invalid */
7176/** Opcode VEX.66.0F 0xf1 - vpsllw Vx, Hx, Wx */
7177FNIEMOP_DEF(iemOp_vpsllw_Vx_Hx_W)
7178{
7179 IEMOP_MNEMONIC3(VEX_RVM, VPSLLW, vpsllw, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
7180 IEMOPMEDIAOPTF3_INIT_VARS(vpsllw);
7181 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
7182}
7183
7184/* Opcode VEX.F2.0F 0xf1 - invalid */
7185
7186/* Opcode VEX.0F 0xf2 - invalid */
7187/** Opcode VEX.66.0F 0xf2 - vpslld Vx, Hx, Wx */
7188FNIEMOP_DEF(iemOp_vpslld_Vx_Hx_Wx)
7189{
7190 IEMOP_MNEMONIC3(VEX_RVM, VPSLLD, vpslld, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
7191 IEMOPMEDIAOPTF3_INIT_VARS(vpslld);
7192 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
7193}
7194/* Opcode VEX.F2.0F 0xf2 - invalid */
7195
7196/* Opcode VEX.0F 0xf3 - invalid */
7197/** Opcode VEX.66.0F 0xf3 - vpsllq Vx, Hx, Wx */
7198FNIEMOP_DEF(iemOp_vpsllq_Vx_Hx_Wx)
7199{
7200 IEMOP_MNEMONIC3(VEX_RVM, VPSLLQ, vpsllq, Vx, Hx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, 0);
7201 IEMOPMEDIAOPTF3_INIT_VARS(vpsllq);
7202 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
7203}
7204/* Opcode VEX.F2.0F 0xf3 - invalid */
7205
7206/* Opcode VEX.0F 0xf4 - invalid */
7207
7208
7209/** Opcode VEX.66.0F 0xf4 - vpmuludq Vx, Hx, Wx */
7210FNIEMOP_DEF(iemOp_vpmuludq_Vx_Hx_W)
7211{
7212 IEMOP_MNEMONIC3(VEX_RVM, VPMULUDQ, vpmuludq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
7213 IEMOPMEDIAOPTF3_INIT_VARS(vpmuludq);
7214 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
7215}
7216
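/*
 * vpmuludq multiplies only the even (low) 32-bit lane of each 64-bit
 * element pair, producing a full unsigned 64-bit product per lane:
 *      puDst->au64[i] = (uint64_t)puSrc1->au32[2 * i] * puSrc2->au32[2 * i];
 */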
7217
7218/* Opcode VEX.F2.0F 0xf4 - invalid */
7219
7220/* Opcode VEX.0F 0xf5 - invalid */
7221
7222
7223/** Opcode VEX.66.0F 0xf5 - vpmaddwd Vx, Hx, Wx */
7224FNIEMOP_DEF(iemOp_vpmaddwd_Vx_Hx_Wx)
7225{
7226 IEMOP_MNEMONIC3(VEX_RVM, VPMADDWD, vpmaddwd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
7227 IEMOPMEDIAOPTF3_INIT_VARS(vpmaddwd);
7228 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
7229}
7230
7231
7232/* Opcode VEX.F2.0F 0xf5 - invalid */
7233
7234/* Opcode VEX.0F 0xf6 - invalid */
7235
7236
7237/** Opcode VEX.66.0F 0xf6 - vpsadbw Vx, Hx, Wx */
7238FNIEMOP_DEF(iemOp_vpsadbw_Vx_Hx_Wx)
7239{
7240 IEMOP_MNEMONIC3(VEX_RVM, VPSADBW, vpsadbw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
7241 IEMOPMEDIAOPTF3_INIT_VARS(vpsadbw);
7242 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
7243}
7244
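/*
 * vpsadbw sums the absolute byte differences within each 8-byte group and
 * deposits the 16-bit sum in the low word of the corresponding 64-bit lane,
 * zeroing the rest.  Conceptually, with o as the byte offset of the lane:
 *      uint16_t uSum = 0;
 *      for (unsigned i = 0; i < 8; i++)
 *          uSum += (uint16_t)RT_ABS((int)puSrc1->au8[o + i] - (int)puSrc2->au8[o + i]);
 */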
7245
7246/* Opcode VEX.F2.0F 0xf6 - invalid */
7247
7248/* Opcode VEX.0F 0xf7 - invalid */
7249
7250
7251/** Opcode VEX.66.0F 0xf7 - vmaskmovdqu Vdq, Udq */
7252FNIEMOP_DEF(iemOp_vmaskmovdqu_Vdq_Udq)
7253{
7254// IEMOP_MNEMONIC2(RM, VMASKMOVDQU, vmaskmovdqu, Vdq, Udq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_AVX, IEMOPHINT_IGNORES_OP_SIZES); /** @todo */
7255 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7256 if (IEM_IS_MODRM_REG_MODE(bRm))
7257 {
7258 /*
7259 * XMM, XMM, (implicit) [ ER]DI
7260 */
7261 IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0);
7262 IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx);
7263 IEM_MC_LOCAL( uint64_t, u64EffAddr);
7264 IEM_MC_LOCAL( RTUINT128U, u128Mem);
7265 IEM_MC_ARG_LOCAL_REF(PRTUINT128U, pu128Mem, u128Mem, 0);
7266 IEM_MC_ARG( PCRTUINT128U, puSrc, 1);
7267 IEM_MC_ARG( PCRTUINT128U, puMsk, 2);
7268 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
7269 IEM_MC_PREPARE_AVX_USAGE();
7270
7271 IEM_MC_FETCH_GREG_U64(u64EffAddr, X86_GREG_xDI);
7272 IEM_MC_FETCH_MEM_U128(u128Mem, pVCpu->iem.s.iEffSeg, u64EffAddr);
7273 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_REG(pVCpu, bRm));
7274 IEM_MC_REF_XREG_U128_CONST(puMsk, IEM_GET_MODRM_RM(pVCpu, bRm));
7275 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_maskmovdqu_u128, pu128Mem, puSrc, puMsk);
7276 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, u64EffAddr, u128Mem);
7277
7278 IEM_MC_ADVANCE_RIP_AND_FINISH();
7279 IEM_MC_END();
7280 }
7281 else
7282 {
7283 /* The memory, register encoding is invalid. */
7284 IEMOP_RAISE_INVALID_OPCODE_RET();
7285 }
7286}
7287
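/*
 * vmaskmovdqu stores to the implicit rDI-addressed operand, writing only
 * the bytes whose mask byte has the MSB set; the worker above emulates this
 * as a read-modify-write of the full 16 bytes.  Conceptually, per byte:
 *      if (puMsk->au8[i] & 0x80)
 *          pu128Mem->au8[i] = puSrc->au8[i];
 */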
7288
7289/* Opcode VEX.F2.0F 0xf7 - invalid */
7290
7291/* Opcode VEX.0F 0xf8 - invalid */
7292
7293
7294/** Opcode VEX.66.0F 0xf8 - vpsubb Vx, Hx, Wx */
7295FNIEMOP_DEF(iemOp_vpsubb_Vx_Hx_Wx)
7296{
7297 IEMOP_MNEMONIC3(VEX_RVM, VPSUBB, vpsubb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
7298 IEMOPMEDIAOPTF3_INIT_VARS( vpsubb);
7299 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
7300}
7301
7302
7303/* Opcode VEX.F2.0F 0xf8 - invalid */
7304
7305/* Opcode VEX.0F 0xf9 - invalid */
7306
7307
7308/** Opcode VEX.66.0F 0xf9 - vpsubw Vx, Hx, Wx */
7309FNIEMOP_DEF(iemOp_vpsubw_Vx_Hx_Wx)
7310{
7311 IEMOP_MNEMONIC3(VEX_RVM, VPSUBW, vpsubw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
7312 IEMOPMEDIAOPTF3_INIT_VARS( vpsubw);
7313 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
7314}
7315
7316
7317/* Opcode VEX.F2.0F 0xf9 - invalid */
7318
7319/* Opcode VEX.0F 0xfa - invalid */
7320
7321
7322/** Opcode VEX.66.0F 0xfa - vpsubd Vx, Hx, Wx */
7323FNIEMOP_DEF(iemOp_vpsubd_Vx_Hx_Wx)
7324{
7325 IEMOP_MNEMONIC3(VEX_RVM, VPSUBD, vpsubd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
7326 IEMOPMEDIAOPTF3_INIT_VARS( vpsubd);
7327 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
7328}
7329
7330
7331/* Opcode VEX.F2.0F 0xfa - invalid */
7332
7333/* Opcode VEX.0F 0xfb - invalid */
7334
7335
7336/** Opcode VEX.66.0F 0xfb - vpsubq Vx, Hx, Wx */
7337FNIEMOP_DEF(iemOp_vpsubq_Vx_Hx_Wx)
7338{
7339 IEMOP_MNEMONIC3(VEX_RVM, VPSUBQ, vpsubq, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
7340 IEMOPMEDIAOPTF3_INIT_VARS( vpsubq);
7341 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
7342}
7343
7344
7345/* Opcode VEX.F2.0F 0xfb - invalid */
7346
7347/* Opcode VEX.0F 0xfc - invalid */
7348
7349
7350/** Opcode VEX.66.0F 0xfc - vpaddb Vx, Hx, Wx */
7351FNIEMOP_DEF(iemOp_vpaddb_Vx_Hx_Wx)
7352{
7353 IEMOP_MNEMONIC3(VEX_RVM, VPADDB, vpaddb, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
7354 IEMOPMEDIAOPTF3_INIT_VARS( vpaddb);
7355 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
7356}
7357
7358
7359/* Opcode VEX.F2.0F 0xfc - invalid */
7360
7361/* Opcode VEX.0F 0xfd - invalid */
7362
7363
7364/** Opcode VEX.66.0F 0xfd - vpaddw Vx, Hx, Wx */
7365FNIEMOP_DEF(iemOp_vpaddw_Vx_Hx_Wx)
7366{
7367 IEMOP_MNEMONIC3(VEX_RVM, VPADDW, vpaddw, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
7368 IEMOPMEDIAOPTF3_INIT_VARS( vpaddw);
7369 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
7370}
7371
7372
7373/* Opcode VEX.F2.0F 0xfd - invalid */
7374
7375/* Opcode VEX.0F 0xfe - invalid */
7376
7377
7378/** Opcode VEX.66.0F 0xfe - vpaddd Vx, Hx, Wx */
7379FNIEMOP_DEF(iemOp_vpaddd_Vx_Hx_Wx)
7380{
7381 IEMOP_MNEMONIC3(VEX_RVM, VPADDD, vpaddd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0);
7382 IEMOPMEDIAOPTF3_INIT_VARS( vpaddd);
7383 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));
7384}
7385
7386
7387/* Opcode VEX.F2.0F 0xfe - invalid */
7388
7389
7390/** Opcode **** 0x0f 0xff - UD0 */
7391FNIEMOP_DEF(iemOp_vud0)
7392{
7393/** @todo testcase: vud0 */
7394 IEMOP_MNEMONIC(vud0, "vud0");
7395 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
7396 {
7397 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); RT_NOREF(bRm);
7398 if (IEM_IS_MODRM_MEM_MODE(bRm))
7399 IEM_OPCODE_SKIP_RM_EFF_ADDR_BYTES(bRm);
7400 }
7401 IEMOP_HLP_DONE_DECODING();
7402 IEMOP_RAISE_INVALID_OPCODE_RET();
7403}
7404
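/*
 * Note that on Intel CPUs ud0 consumes a ModR/M byte and any addressing
 * bytes before raising \#UD, whereas other vendors fault on the opcode
 * alone; the difference affects the instruction length seen by the
 * exception handler.
 */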
7405
7406
7407/**
7408 * VEX opcode map \#1.
7409 *
7410 * @sa g_apfnTwoByteMap
7411 */
7412const PFNIEMOP g_apfnVexMap1[] =
7413{
7414    /*  no prefix,                  066h prefix,                f3h prefix,                 f2h prefix */
7415 /* 0x00 */ IEMOP_X4(iemOp_InvalidNeedRM),
7416 /* 0x01 */ IEMOP_X4(iemOp_InvalidNeedRM),
7417 /* 0x02 */ IEMOP_X4(iemOp_InvalidNeedRM),
7418 /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRM),
7419 /* 0x04 */ IEMOP_X4(iemOp_InvalidNeedRM),
7420 /* 0x05 */ IEMOP_X4(iemOp_InvalidNeedRM),
7421 /* 0x06 */ IEMOP_X4(iemOp_InvalidNeedRM),
7422 /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRM),
7423 /* 0x08 */ IEMOP_X4(iemOp_InvalidNeedRM),
7424 /* 0x09 */ IEMOP_X4(iemOp_InvalidNeedRM),
7425 /* 0x0a */ IEMOP_X4(iemOp_InvalidNeedRM),
7426 /* 0x0b */ IEMOP_X4(iemOp_vud2), /* ?? */
7427 /* 0x0c */ IEMOP_X4(iemOp_InvalidNeedRM),
7428 /* 0x0d */ IEMOP_X4(iemOp_InvalidNeedRM),
7429 /* 0x0e */ IEMOP_X4(iemOp_InvalidNeedRM),
7430 /* 0x0f */ IEMOP_X4(iemOp_InvalidNeedRM),
7431
7432 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vss_Hss_Wss, iemOp_vmovsd_Vsd_Hsd_Wsd,
7433 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hss_Vss, iemOp_vmovsd_Wsd_Hsd_Vsd,
7434 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
7435 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7436 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7437 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7438 /* 0x16 */ iemOp_vmovhps_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpd_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,
7439 /* 0x17 */ iemOp_vmovhps_Mq_Vq, iemOp_vmovhpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7440 /* 0x18 */ IEMOP_X4(iemOp_InvalidNeedRM),
7441 /* 0x19 */ IEMOP_X4(iemOp_InvalidNeedRM),
7442 /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRM),
7443 /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),
7444 /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRM),
7445 /* 0x1d */ IEMOP_X4(iemOp_InvalidNeedRM),
7446 /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRM),
7447 /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),
7448
7449 /* 0x20 */ IEMOP_X4(iemOp_InvalidNeedRM),
7450 /* 0x21 */ IEMOP_X4(iemOp_InvalidNeedRM),
7451 /* 0x22 */ IEMOP_X4(iemOp_InvalidNeedRM),
7452 /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRM),
7453 /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRM),
7454 /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRM),
7455 /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),
7456 /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),
7457 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7458 /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7459 /* 0x2a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,
7460 /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7461 /* 0x2c */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,
7462 /* 0x2d */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,
7463 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7464 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7465
7466 /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRM),
7467 /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRM),
7468 /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRM),
7469 /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRM),
7470 /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRM),
7471 /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRM),
7472 /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRM),
7473 /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRM),
7474 /* 0x38 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7475 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7476 /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7477 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7478 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7479 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7480 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7481 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */
7482
7483 /* 0x40 */ IEMOP_X4(iemOp_InvalidNeedRM),
7484 /* 0x41 */ IEMOP_X4(iemOp_InvalidNeedRM),
7485 /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),
7486 /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),
7487 /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),
7488 /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRM),
7489 /* 0x46 */ IEMOP_X4(iemOp_InvalidNeedRM),
7490 /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRM),
7491 /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),
7492 /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),
7493 /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),
7494 /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),
7495 /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),
7496 /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),
7497 /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),
7498 /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),
7499
7500 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7501 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,
7502 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
7503 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,
7504 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7505 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7506 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7507 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7508 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,
7509 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,
7510 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,
7511 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,
7512 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,
7513 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,
7514 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,
7515 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,
7516
7517 /* 0x60 */ iemOp_InvalidNeedRM, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7518 /* 0x61 */ iemOp_InvalidNeedRM, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7519 /* 0x62 */ iemOp_InvalidNeedRM, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7520 /* 0x63 */ iemOp_InvalidNeedRM, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7521 /* 0x64 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7522 /* 0x65 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7523 /* 0x66 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7524 /* 0x67 */ iemOp_InvalidNeedRM, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7525 /* 0x68 */ iemOp_InvalidNeedRM, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7526 /* 0x69 */ iemOp_InvalidNeedRM, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7527 /* 0x6a */ iemOp_InvalidNeedRM, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7528 /* 0x6b */ iemOp_InvalidNeedRM, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7529 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7530 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7531 /* 0x6e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7532 /* 0x6f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,
7533
7534 /* 0x70 */ iemOp_InvalidNeedRM, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,
7535 /* 0x71 */ iemOp_InvalidNeedRM, iemOp_VGrp12, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7536 /* 0x72 */ iemOp_InvalidNeedRM, iemOp_VGrp13, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7537 /* 0x73 */ iemOp_InvalidNeedRM, iemOp_VGrp14, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7538 /* 0x74 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7539 /* 0x75 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7540 /* 0x76 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7541 /* 0x77 */ iemOp_vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7542 /* 0x78 */ IEMOP_X4(iemOp_InvalidNeedRM),
7543 /* 0x79 */ IEMOP_X4(iemOp_InvalidNeedRM),
7544 /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),
7545 /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),
7546 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,
7547 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,
7548 /* 0x7e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,
7549 /* 0x7f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,
7550
7551 /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRM),
7552 /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRM),
7553 /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRM),
7554 /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRM),
7555 /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),
7556 /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),
7557 /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),
7558 /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),
7559 /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),
7560 /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),
7561 /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),
7562 /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),
7563 /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRM),
7564 /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),
7565 /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRM),
7566 /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),
7567
7568 /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRM),
7569 /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRM),
7570 /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRM),
7571 /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRM),
7572 /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRM),
7573 /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRM),
7574 /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRM),
7575 /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRM),
7576 /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRM),
7577 /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRM),
7578 /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRM),
7579 /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRM),
7580 /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRM),
7581 /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRM),
7582 /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRM),
7583 /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRM),
7584
7585 /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRM),
7586 /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRM),
7587 /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRM),
7588 /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRM),
7589 /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRM),
7590 /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRM),
7591 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),
7592 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),
7593 /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRM),
7594 /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRM),
7595 /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRM),
7596 /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRM),
7597 /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRM),
7598 /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRM),
7599 /* 0xae */ IEMOP_X4(iemOp_VGrp15),
7600 /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRM),
7601
7602 /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRM),
7603 /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRM),
7604 /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRM),
7605 /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRM),
7606 /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRM),
7607 /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRM),
7608 /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRM),
7609 /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRM),
7610 /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRM),
7611 /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRM),
7612 /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRM),
7613 /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRM),
7614 /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRM),
7615 /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRM),
7616 /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRM),
7617 /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRM),
7618
7619 /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRM),
7620 /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRM),
7621 /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,
7622 /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRM),
7623 /* 0xc4 */ iemOp_InvalidNeedRM, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
7624 /* 0xc5 */ iemOp_InvalidNeedRM, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,
7625 /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8,iemOp_InvalidNeedRMImm8,
7626 /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRM),
7627 /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRM),
7628 /* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRM),
7629 /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRM),
7630 /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRM),
7631 /* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRM),
7632 /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRM),
7633 /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRM),
7634 /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRM),
7635
7636 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,
7637 /* 0xd1 */ iemOp_InvalidNeedRM, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7638 /* 0xd2 */ iemOp_InvalidNeedRM, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7639 /* 0xd3 */ iemOp_InvalidNeedRM, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7640 /* 0xd4 */ iemOp_InvalidNeedRM, iemOp_vpaddq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7641 /* 0xd5 */ iemOp_InvalidNeedRM, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7642 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7643 /* 0xd7 */ iemOp_InvalidNeedRM, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7644 /* 0xd8 */ iemOp_InvalidNeedRM, iemOp_vpsubusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7645 /* 0xd9 */ iemOp_InvalidNeedRM, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7646 /* 0xda */ iemOp_InvalidNeedRM, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7647 /* 0xdb */ iemOp_InvalidNeedRM, iemOp_vpand_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7648 /* 0xdc */ iemOp_InvalidNeedRM, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7649 /* 0xdd */ iemOp_InvalidNeedRM, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7650 /* 0xde */ iemOp_InvalidNeedRM, iemOp_vpmaxub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7651 /* 0xdf */ iemOp_InvalidNeedRM, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7652
7653 /* 0xe0 */ iemOp_InvalidNeedRM, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7654 /* 0xe1 */ iemOp_InvalidNeedRM, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7655 /* 0xe2 */ iemOp_InvalidNeedRM, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7656 /* 0xe3 */ iemOp_InvalidNeedRM, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7657 /* 0xe4 */ iemOp_InvalidNeedRM, iemOp_vpmulhuw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7658 /* 0xe5 */ iemOp_InvalidNeedRM, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7659 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,
7660 /* 0xe7 */ iemOp_InvalidNeedRM, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7661 /* 0xe8 */ iemOp_InvalidNeedRM, iemOp_vpsubsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7662 /* 0xe9 */ iemOp_InvalidNeedRM, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7663 /* 0xea */ iemOp_InvalidNeedRM, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7664 /* 0xeb */ iemOp_InvalidNeedRM, iemOp_vpor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7665 /* 0xec */ iemOp_InvalidNeedRM, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7666 /* 0xed */ iemOp_InvalidNeedRM, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7667 /* 0xee */ iemOp_InvalidNeedRM, iemOp_vpmaxsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7668 /* 0xef */ iemOp_InvalidNeedRM, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7669
7670 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,
7671 /* 0xf1 */ iemOp_InvalidNeedRM, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7672 /* 0xf2 */ iemOp_InvalidNeedRM, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7673 /* 0xf3 */ iemOp_InvalidNeedRM, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7674 /* 0xf4 */ iemOp_InvalidNeedRM, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7675 /* 0xf5 */ iemOp_InvalidNeedRM, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7676 /* 0xf6 */ iemOp_InvalidNeedRM, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7677 /* 0xf7 */ iemOp_InvalidNeedRM, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7678 /* 0xf8 */ iemOp_InvalidNeedRM, iemOp_vpsubb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7679 /* 0xf9 */ iemOp_InvalidNeedRM, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7680 /* 0xfa */ iemOp_InvalidNeedRM, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7681 /* 0xfb */ iemOp_InvalidNeedRM, iemOp_vpsubq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7682 /* 0xfc */ iemOp_InvalidNeedRM, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7683 /* 0xfd */ iemOp_InvalidNeedRM, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7684 /* 0xfe */ iemOp_InvalidNeedRM, iemOp_vpaddd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
7685 /* 0xff */ IEMOP_X4(iemOp_vud0) /* ?? */
7686};
7687AssertCompile(RT_ELEMENTS(g_apfnVexMap1) == 1024);
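/*
 * Four entries per opcode byte, one for each SIMD prefix column (none,
 * 0x66, 0xF3, 0xF2), hence the 256 * 4 == 1024 assertion above.  A lookup
 * therefore presumably has this shape on the decoder side (sketch only;
 * idxSimdPrefix is a hypothetical name):
 *      PFNIEMOP pfn = g_apfnVexMap1[(uintptr_t)bOpcode * 4 + idxSimdPrefix];
 */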
7688/** @} */
7689