VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsThree0f38.cpp.h@100714

Last change on this file since 100714 was 100714, checked in by vboxsync, 16 months ago

VMM/IEM: Require an IEMOP_HLP_DONE_DECODING in all MC blocks so we know exactly when the recompiler starts emitting code (calls) and we can make sure it's still safe to restart instruction decoding. Also made the python script check this and that nothing that smells like decoding happens after IEMOP_HLP_DONE_DECODING and its friends. bugref:10369

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 99.8 KB
/* $Id: IEMAllInstructionsThree0f38.cpp.h 100714 2023-07-27 10:12:09Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 *
 * @remarks IEMAllInstructionsVexMap2.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/** @name Three byte opcodes with first two bytes 0x0f 0x38
 * @{
 */

/**
 * Common worker for MMX instructions on the form:
 *      pxxx    mm1, mm2/mem64
 * that was introduced with SSSE3.
 */
FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full_Ssse3, PFNIEMAIMPLMEDIAF2U64, pfnU64)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * MMX, MMX.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSsse3);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t const *, pSrc, 1);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_MREG_U64_CONST(pSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * MMX, [mem64].
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_LOCAL(uint64_t, uSrc);
        IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSsse3);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();

        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_MMX_AIMPL_2(pfnU64, pDst, pSrc);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
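
/*
 * Illustrative sketch (not from the original file): the 64-bit PSHUFB
 * semantics a pfnU64 worker implements, in plain C.  The function name and
 * bare two-operand signature are assumptions for the sketch; the real
 * iemAImpl_* workers live elsewhere and IEM_MC_CALL_MMX_AIMPL_2 supplies any
 * additional state itself.
 */
#if 0 /* sketch only, not built */
static void iemSketchPshufbU64(uint64_t *puDst, uint64_t const *puSrc)
{
    uint64_t const uSelectors = *puSrc;
    uint64_t const uDstIn     = *puDst;
    uint64_t       uResult    = 0;
    for (unsigned iByte = 0; iByte < 8; iByte++)
    {
        uint8_t const bSel = (uint8_t)(uSelectors >> (iByte * 8));
        if (!(bSel & 0x80)) /* selector MSB set -> destination byte is zeroed */
            uResult |= (uint64_t)(uint8_t)(uDstIn >> ((bSel & 7) * 8)) << (iByte * 8);
    }
    *puDst = uResult;
}
#endif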


/**
 * Common worker for SSSE3 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSSE3 cpuid checks.
 *
 * @sa  iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSsse3_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSsse3);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSsse3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE4.1 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE4.1 cpuid checks.
 *
 * @sa  iemOpCommonSse2_FullFull_To_Full, iemOpCommonSsse3_FullFull_To_Full,
 *      iemOpCommonSse42_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse41_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE4.1 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE4.1 cpuid checks.
 *
 * Unlike iemOpCommonSse41_FullFull_To_Full, the @a pfnU128 worker function
 * takes no FXSAVE state, just the operands.
 *
 * @sa  iemOpCommonSse2_FullFull_To_Full, iemOpCommonSsse3_FullFull_To_Full,
 *      iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse42_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse41Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE4.2 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE4.2 cpuid checks.
 *
 * @sa  iemOpCommonSse2_FullFull_To_Full, iemOpCommonSsse3_FullFull_To_Full,
 *      iemOpCommonSse41_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse42_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse42);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse42);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE-style AES-NI instructions of the form:
 *      aesxxx  xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. AES-NI cpuid checks.
 *
 * Unlike iemOpCommonSse41_FullFull_To_Full, the @a pfnU128 worker function
 * takes no FXSAVE state, just the operands.
 *
 * @sa  iemOpCommonSse2_FullFull_To_Full, iemOpCommonSsse3_FullFull_To_Full,
 *      iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse42_FullFull_To_Full,
 *      iemOpCommonSha_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonAesNi_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fAesNi);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fAesNi);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE-style SHA instructions of the form:
 *      shaxxx  xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SHA cpuid checks.
 *
 * Unlike iemOpCommonSse41_FullFull_To_Full, the @a pfnU128 worker function
 * takes no FXSAVE state, just the operands.
 *
 * @sa  iemOpCommonSse2_FullFull_To_Full, iemOpCommonSsse3_FullFull_To_Full,
 *      iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse42_FullFull_To_Full,
 *      iemOpCommonAesNi_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSha_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSha);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSha);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode 0x0f 0x38 0x00. */
FNIEMOP_DEF(iemOp_pshufb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSHUFB, pshufb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full_Ssse3,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pshufb_u64, &iemAImpl_pshufb_u64_fallback));
}
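
/*
 * Side note, illustrative only: IEM_SELECT_HOST_OR_FALLBACK picks the native
 * worker when the host CPU offers the feature and the generic C fallback
 * otherwise.  Conceptually it reduces to the hypothetical macro below; the
 * real definition lives elsewhere in IEM and may differ in detail.
 */
#if 0 /* sketch only, not built */
# define SKETCH_SELECT_HOST_OR_FALLBACK(a_fHostHasFeature, a_pfnNative, a_pfnFallback) \
    ((a_fHostHasFeature) ? (a_pfnNative) : (a_pfnFallback))
#endif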


/** Opcode 0x66 0x0f 0x38 0x00. */
FNIEMOP_DEF(iemOp_pshufb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSHUFB, pshufb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pshufb_u128, iemAImpl_pshufb_u128_fallback));
}


/** Opcode 0x0f 0x38 0x01. */
FNIEMOP_DEF(iemOp_phaddw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PHADDW, phaddw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full_Ssse3,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phaddw_u64, &iemAImpl_phaddw_u64_fallback));
}


/** Opcode 0x66 0x0f 0x38 0x01. */
FNIEMOP_DEF(iemOp_phaddw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PHADDW, phaddw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phaddw_u128, iemAImpl_phaddw_u128_fallback));
}


/** Opcode 0x0f 0x38 0x02. */
FNIEMOP_DEF(iemOp_phaddd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PHADDD, phaddd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full_Ssse3,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phaddd_u64, &iemAImpl_phaddd_u64_fallback));
}


/** Opcode 0x66 0x0f 0x38 0x02. */
FNIEMOP_DEF(iemOp_phaddd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PHADDD, phaddd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phaddd_u128, iemAImpl_phaddd_u128_fallback));
}


/** Opcode 0x0f 0x38 0x03. */
FNIEMOP_DEF(iemOp_phaddsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PHADDSW, phaddsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full_Ssse3,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phaddsw_u64, &iemAImpl_phaddsw_u64_fallback));
}


/** Opcode 0x66 0x0f 0x38 0x03. */
FNIEMOP_DEF(iemOp_phaddsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PHADDSW, phaddsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phaddsw_u128, iemAImpl_phaddsw_u128_fallback));
}


/** Opcode 0x0f 0x38 0x04. */
FNIEMOP_DEF(iemOp_pmaddubsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMADDUBSW, pmaddubsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full_Ssse3,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pmaddubsw_u64, &iemAImpl_pmaddubsw_u64_fallback));
}


/** Opcode 0x66 0x0f 0x38 0x04. */
FNIEMOP_DEF(iemOp_pmaddubsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMADDUBSW, pmaddubsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pmaddubsw_u128, iemAImpl_pmaddubsw_u128_fallback));
}


/** Opcode 0x0f 0x38 0x05. */
FNIEMOP_DEF(iemOp_phsubw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PHSUBW, phsubw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full_Ssse3,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phsubw_u64, &iemAImpl_phsubw_u64_fallback));
}


/** Opcode 0x66 0x0f 0x38 0x05. */
FNIEMOP_DEF(iemOp_phsubw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PHSUBW, phsubw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phsubw_u128, iemAImpl_phsubw_u128_fallback));
}


/** Opcode 0x0f 0x38 0x06. */
FNIEMOP_DEF(iemOp_phsubd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PHSUBD, phsubd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full_Ssse3,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phsubd_u64, &iemAImpl_phsubd_u64_fallback));
}


/** Opcode 0x66 0x0f 0x38 0x06. */
FNIEMOP_DEF(iemOp_phsubd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PHSUBD, phsubd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phsubd_u128, iemAImpl_phsubd_u128_fallback));
}


/** Opcode 0x0f 0x38 0x07. */
FNIEMOP_DEF(iemOp_phsubsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PHSUBSW, phsubsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full_Ssse3,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phsubsw_u64, &iemAImpl_phsubsw_u64_fallback));
}


/** Opcode 0x66 0x0f 0x38 0x07. */
FNIEMOP_DEF(iemOp_phsubsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PHSUBSW, phsubsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phsubsw_u128, iemAImpl_phsubsw_u128_fallback));
}


/** Opcode 0x0f 0x38 0x08. */
FNIEMOP_DEF(iemOp_psignb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSIGNB, psignb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full_Ssse3,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_psignb_u64, &iemAImpl_psignb_u64_fallback));
}


/** Opcode 0x66 0x0f 0x38 0x08. */
FNIEMOP_DEF(iemOp_psignb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSIGNB, psignb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_psignb_u128, iemAImpl_psignb_u128_fallback));
}


/** Opcode 0x0f 0x38 0x09. */
FNIEMOP_DEF(iemOp_psignw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSIGNW, psignw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full_Ssse3,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_psignw_u64, &iemAImpl_psignw_u64_fallback));
}


/** Opcode 0x66 0x0f 0x38 0x09. */
FNIEMOP_DEF(iemOp_psignw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSIGNW, psignw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_psignw_u128, iemAImpl_psignw_u128_fallback));
}


/** Opcode 0x0f 0x38 0x0a. */
FNIEMOP_DEF(iemOp_psignd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSIGND, psignd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full_Ssse3,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_psignd_u64, &iemAImpl_psignd_u64_fallback));
}


/** Opcode 0x66 0x0f 0x38 0x0a. */
FNIEMOP_DEF(iemOp_psignd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSIGND, psignd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_psignd_u128, iemAImpl_psignd_u128_fallback));
}


/** Opcode 0x0f 0x38 0x0b. */
FNIEMOP_DEF(iemOp_pmulhrsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMULHRSW, pmulhrsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full_Ssse3,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pmulhrsw_u64, &iemAImpl_pmulhrsw_u64_fallback));
}


/** Opcode 0x66 0x0f 0x38 0x0b. */
FNIEMOP_DEF(iemOp_pmulhrsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULHRSW, pmulhrsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pmulhrsw_u128, iemAImpl_pmulhrsw_u128_fallback));
}


/*  Opcode 0x0f 0x38 0x0c - invalid. */
/*  Opcode 0x66 0x0f 0x38 0x0c - invalid (vex only). */
/*  Opcode 0x0f 0x38 0x0d - invalid. */
/*  Opcode 0x66 0x0f 0x38 0x0d - invalid (vex only). */
/*  Opcode 0x0f 0x38 0x0e - invalid. */
/*  Opcode 0x66 0x0f 0x38 0x0e - invalid (vex only). */
/*  Opcode 0x0f 0x38 0x0f - invalid. */
/*  Opcode 0x66 0x0f 0x38 0x0f - invalid (vex only). */


/*  Opcode 0x0f 0x38 0x10 - invalid */


/** Body for the *blend* instructions. */
#define IEMOP_BODY_P_BLEND_X(a_Instr) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register, register. \
         */ \
        IEM_MC_BEGIN(3, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41); \
        IEM_MC_ARG(PRTUINT128U, puDst, 0); \
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1); \
        IEM_MC_ARG(PCRTUINT128U, puMask, 2); \
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); \
        IEM_MC_PREPARE_SSE_USAGE(); \
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_XREG_U128_CONST(puMask, 0); \
        IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fSse41, \
                                                             iemAImpl_ ## a_Instr ## _u128, \
                                                             iemAImpl_ ## a_Instr ## _u128_fallback), \
                                 puDst, puSrc, puMask); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * Register, memory. \
         */ \
        IEM_MC_BEGIN(3, 2); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
        IEM_MC_LOCAL(RTUINT128U, uSrc); \
        IEM_MC_ARG(PRTUINT128U, puDst, 0); \
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1); \
        IEM_MC_ARG(PCRTUINT128U, puMask, 2); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41); \
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); \
        IEM_MC_PREPARE_SSE_USAGE(); \
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_XREG_U128_CONST(puMask, 0); \
        IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fSse41, \
                                                             iemAImpl_ ## a_Instr ## _u128, \
                                                             iemAImpl_ ## a_Instr ## _u128_fallback), \
                                 puDst, puSrc, puMask); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    (void)0
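
/*
 * Illustrative sketch (not from the original file): the per-byte semantics
 * the PBLEND body above dispatches to, using PBLENDVB as the example.  The
 * MSB of each XMM0 (mask) byte selects the source byte, otherwise the
 * destination byte is kept; BLENDVPS and BLENDVPD do the same per dword and
 * qword.  The helper name is hypothetical.
 */
#if 0 /* sketch only, not built */
static void iemSketchPblendvbU128(PRTUINT128U puDst, PCRTUINT128U puSrc, PCRTUINT128U puMask)
{
    for (unsigned iByte = 0; iByte < 16; iByte++)
        if (puMask->au8[iByte] & 0x80)
            puDst->au8[iByte] = puSrc->au8[iByte];
}
#endif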

/** Opcode 0x66 0x0f 0x38 0x10 (legacy only). */
FNIEMOP_DEF(iemOp_pblendvb_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, PBLENDVB, pblendvb, Vdq, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES); /** @todo RM0 */
    IEMOP_BODY_P_BLEND_X(pblendvb);
}


/*  Opcode 0x0f 0x38 0x11 - invalid */
/*  Opcode 0x66 0x0f 0x38 0x11 - invalid */
/*  Opcode 0x0f 0x38 0x12 - invalid */
/*  Opcode 0x66 0x0f 0x38 0x12 - invalid */
/*  Opcode 0x0f 0x38 0x13 - invalid */
/*  Opcode 0x66 0x0f 0x38 0x13 - invalid (vex only). */
/*  Opcode 0x0f 0x38 0x14 - invalid */


/** Opcode 0x66 0x0f 0x38 0x14 (legacy only). */
FNIEMOP_DEF(iemOp_blendvps_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, BLENDVPS, blendvps, Vdq, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES); /** @todo RM0 */
    IEMOP_BODY_P_BLEND_X(blendvps);
}


/*  Opcode 0x0f 0x38 0x15 - invalid */


/** Opcode 0x66 0x0f 0x38 0x15 (legacy only). */
FNIEMOP_DEF(iemOp_blendvpd_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, BLENDVPD, blendvpd, Vdq, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES); /** @todo RM0 */
    IEMOP_BODY_P_BLEND_X(blendvpd);
}


/*  Opcode 0x0f 0x38 0x16 - invalid */
/*  Opcode 0x66 0x0f 0x38 0x16 - invalid (vex only). */
/*  Opcode 0x0f 0x38 0x17 - invalid */

/** Opcode 0x66 0x0f 0x38 0x17. */
FNIEMOP_DEF(iemOp_ptest_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PTEST, ptest, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(3, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
        IEM_MC_ARG(PCRTUINT128U, puSrc1, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc2, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_ptest_u128, puSrc1, puSrc2, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(PCRTUINT128U, puSrc1, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_ptest_u128, puSrc1, puSrc2, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
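
/*
 * Illustrative sketch (not from the original file): the EFLAGS computation
 * PTEST performs per the SDM, with puSrc1 = Vx and puSrc2 = Wx as above.
 * ZF is set when (Vx AND Wx) is all zeroes, CF when (NOT Vx AND Wx) is all
 * zeroes; AF, OF, PF and SF are cleared.  The helper name is hypothetical.
 */
#if 0 /* sketch only, not built */
static void iemSketchPtestU128(PCRTUINT128U puSrc1, PCRTUINT128U puSrc2, uint32_t *pfEFlags)
{
    uint32_t fEfl = *pfEFlags & ~(uint32_t)(X86_EFL_ZF | X86_EFL_CF | X86_EFL_AF | X86_EFL_OF | X86_EFL_PF | X86_EFL_SF);
    if (   !(puSrc1->au64[0] & puSrc2->au64[0])
        && !(puSrc1->au64[1] & puSrc2->au64[1]))
        fEfl |= X86_EFL_ZF;
    if (   !(~puSrc1->au64[0] & puSrc2->au64[0])
        && !(~puSrc1->au64[1] & puSrc2->au64[1]))
        fEfl |= X86_EFL_CF;
    *pfEFlags = fEfl;
}
#endif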


/*  Opcode 0x0f 0x38 0x18 - invalid */
/*  Opcode 0x66 0x0f 0x38 0x18 - invalid (vex only). */
/*  Opcode 0x0f 0x38 0x19 - invalid */
/*  Opcode 0x66 0x0f 0x38 0x19 - invalid (vex only). */
/*  Opcode 0x0f 0x38 0x1a - invalid */
/*  Opcode 0x66 0x0f 0x38 0x1a - invalid (vex only). */
/*  Opcode 0x0f 0x38 0x1b - invalid */
/*  Opcode 0x66 0x0f 0x38 0x1b - invalid */


/** Opcode 0x0f 0x38 0x1c. */
FNIEMOP_DEF(iemOp_pabsb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PABSB, pabsb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full_Ssse3,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pabsb_u64, &iemAImpl_pabsb_u64_fallback));
}


/** Opcode 0x66 0x0f 0x38 0x1c. */
FNIEMOP_DEF(iemOp_pabsb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PABSB, pabsb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pabsb_u128, iemAImpl_pabsb_u128_fallback));
}


/** Opcode 0x0f 0x38 0x1d. */
FNIEMOP_DEF(iemOp_pabsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PABSW, pabsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full_Ssse3,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pabsw_u64, &iemAImpl_pabsw_u64_fallback));
}


/** Opcode 0x66 0x0f 0x38 0x1d. */
FNIEMOP_DEF(iemOp_pabsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PABSW, pabsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pabsw_u128, iemAImpl_pabsw_u128_fallback));
}


/** Opcode 0x0f 0x38 0x1e. */
FNIEMOP_DEF(iemOp_pabsd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PABSD, pabsd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full_Ssse3,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pabsd_u64, &iemAImpl_pabsd_u64_fallback));
}


/** Opcode 0x66 0x0f 0x38 0x1e. */
FNIEMOP_DEF(iemOp_pabsd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PABSD, pabsd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pabsd_u128, iemAImpl_pabsd_u128_fallback));
}


/*  Opcode 0x0f 0x38 0x1f - invalid */
/*  Opcode 0x66 0x0f 0x38 0x1f - invalid */


/** Body for the pmov{s,z}x* instructions. */
#define IEMOP_BODY_PMOV_S_Z(a_Instr, a_SrcWidth) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register, register. \
         */ \
        IEM_MC_BEGIN(2, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41); \
        IEM_MC_ARG(PRTUINT128U, puDst, 0); \
        IEM_MC_ARG(uint64_t, uSrc, 1); \
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); \
        IEM_MC_PREPARE_SSE_USAGE(); \
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword */); \
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse41, \
                                                             iemAImpl_ ## a_Instr ## _u128, \
                                                             iemAImpl_v ## a_Instr ## _u128_fallback), \
                                 puDst, uSrc); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * Register, memory. \
         */ \
        IEM_MC_BEGIN(2, 2); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
        IEM_MC_ARG(PRTUINT128U, puDst, 0); \
        IEM_MC_ARG(uint ## a_SrcWidth ## _t, uSrc, 1); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41); \
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); \
        IEM_MC_PREPARE_SSE_USAGE(); \
        IEM_MC_FETCH_MEM_U ## a_SrcWidth (uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse41, \
                                                             iemAImpl_ ## a_Instr ## _u128, \
                                                             iemAImpl_v ## a_Instr ## _u128_fallback), \
                                 puDst, uSrc); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    (void)0
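
/*
 * Illustrative sketch (not from the original file): what the narrowest
 * instantiation of the body above computes, using PMOVSXBW as the example:
 * the eight low source bytes are sign-extended to eight words.  The zero
 * extending PMOVZX* forms simply drop the signed casts.  The helper name is
 * hypothetical.
 */
#if 0 /* sketch only, not built */
static void iemSketchPmovsxbwU128(PRTUINT128U puDst, uint64_t uSrc)
{
    RTUINT128U uResult;
    for (unsigned iWord = 0; iWord < 8; iWord++)
        uResult.au16[iWord] = (uint16_t)(int16_t)(int8_t)(uint8_t)(uSrc >> (iWord * 8));
    *puDst = uResult;
}
#endif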


/** Opcode 0x66 0x0f 0x38 0x20. */
FNIEMOP_DEF(iemOp_pmovsxbw_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVSXBW, pmovsxbw, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_PMOV_S_Z(pmovsxbw, 64);
}


/** Opcode 0x66 0x0f 0x38 0x21. */
FNIEMOP_DEF(iemOp_pmovsxbd_Vx_UxMd)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVSXBD, pmovsxbd, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_PMOV_S_Z(pmovsxbd, 32);
}


/** Opcode 0x66 0x0f 0x38 0x22. */
FNIEMOP_DEF(iemOp_pmovsxbq_Vx_UxMw)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVSXBQ, pmovsxbq, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_PMOV_S_Z(pmovsxbq, 16);
}


/** Opcode 0x66 0x0f 0x38 0x23. */
FNIEMOP_DEF(iemOp_pmovsxwd_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVSXWD, pmovsxwd, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_PMOV_S_Z(pmovsxwd, 64);
}


/** Opcode 0x66 0x0f 0x38 0x24. */
FNIEMOP_DEF(iemOp_pmovsxwq_Vx_UxMd)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVSXWQ, pmovsxwq, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_PMOV_S_Z(pmovsxwq, 32);
}


/** Opcode 0x66 0x0f 0x38 0x25. */
FNIEMOP_DEF(iemOp_pmovsxdq_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVSXDQ, pmovsxdq, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_PMOV_S_Z(pmovsxdq, 64);
}


/*  Opcode 0x66 0x0f 0x38 0x26 - invalid */
/*  Opcode 0x66 0x0f 0x38 0x27 - invalid */


/** Opcode 0x66 0x0f 0x38 0x28. */
FNIEMOP_DEF(iemOp_pmuldq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULDQ, pmuldq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse41Opt_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pmuldq_u128, iemAImpl_pmuldq_u128_fallback));
}
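
/*
 * Illustrative sketch (not from the original file): PMULDQ multiplies the
 * signed low dwords of each qword lane into full 64-bit products.  The
 * helper name is hypothetical.
 */
#if 0 /* sketch only, not built */
static void iemSketchPmuldqU128(PRTUINT128U puDst, PCRTUINT128U puSrc)
{
    puDst->au64[0] = (uint64_t)((int64_t)(int32_t)puDst->au32[0] * (int32_t)puSrc->au32[0]);
    puDst->au64[1] = (uint64_t)((int64_t)(int32_t)puDst->au32[2] * (int32_t)puSrc->au32[2]);
}
#endif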


/** Opcode 0x66 0x0f 0x38 0x29. */
FNIEMOP_DEF(iemOp_pcmpeqq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPEQQ, pcmpeqq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pcmpeqq_u128, iemAImpl_pcmpeqq_u128_fallback));
}


/**
 * @opcode      0x2a
 * @opcodesub   !11 mr/reg
 * @oppfx       0x66
 * @opcpuid     sse4.1
 * @opgroup     og_sse41_cachect
 * @opxcpttype  1
 * @optest      op1=-1 op2=2 -> op1=2
 * @optest      op1=0 op2=-42 -> op1=-42
 */
FNIEMOP_DEF(iemOp_movntdqa_Vdq_Mdq)
{
    IEMOP_MNEMONIC2(RM_MEM, MOVNTDQA, movntdqa, Vdq_WO, Mdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /* Register, memory. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic  ud660f382areg
     * @opcode      0x2a
     * @opcodesub   11 mr/reg
     * @oppfx       0x66
     * @opunused    immediate
     * @opcpuid     sse
     * @optest      ->
     */
    else
        IEMOP_RAISE_INVALID_OPCODE_RET();
}


/** Opcode 0x66 0x0f 0x38 0x2b. */
FNIEMOP_DEF(iemOp_packusdw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PACKUSDW, packusdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse41Opt_FullFull_To_Full, iemAImpl_packusdw_u128);
}


/*  Opcode 0x66 0x0f 0x38 0x2c - invalid (vex only). */
/*  Opcode 0x66 0x0f 0x38 0x2d - invalid (vex only). */
/*  Opcode 0x66 0x0f 0x38 0x2e - invalid (vex only). */
/*  Opcode 0x66 0x0f 0x38 0x2f - invalid (vex only). */

/** Opcode 0x66 0x0f 0x38 0x30. */
FNIEMOP_DEF(iemOp_pmovzxbw_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVZXBW, pmovzxbw, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_PMOV_S_Z(pmovzxbw, 64);
}


/** Opcode 0x66 0x0f 0x38 0x31. */
FNIEMOP_DEF(iemOp_pmovzxbd_Vx_UxMd)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVZXBD, pmovzxbd, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_PMOV_S_Z(pmovzxbd, 32);
}


/** Opcode 0x66 0x0f 0x38 0x32. */
FNIEMOP_DEF(iemOp_pmovzxbq_Vx_UxMw)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVZXBQ, pmovzxbq, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_PMOV_S_Z(pmovzxbq, 16);
}


/** Opcode 0x66 0x0f 0x38 0x33. */
FNIEMOP_DEF(iemOp_pmovzxwd_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVZXWD, pmovzxwd, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_PMOV_S_Z(pmovzxwd, 64);
}


/** Opcode 0x66 0x0f 0x38 0x34. */
FNIEMOP_DEF(iemOp_pmovzxwq_Vx_UxMd)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVZXWQ, pmovzxwq, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_PMOV_S_Z(pmovzxwq, 32);
}


/** Opcode 0x66 0x0f 0x38 0x35. */
FNIEMOP_DEF(iemOp_pmovzxdq_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVZXDQ, pmovzxdq, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_PMOV_S_Z(pmovzxdq, 64);
}


/*  Opcode 0x66 0x0f 0x38 0x36 - invalid (vex only). */


/** Opcode 0x66 0x0f 0x38 0x37. */
FNIEMOP_DEF(iemOp_pcmpgtq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPGTQ, pcmpgtq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse42_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_pcmpgtq_u128, iemAImpl_pcmpgtq_u128_fallback));
}


/** Opcode 0x66 0x0f 0x38 0x38. */
FNIEMOP_DEF(iemOp_pminsb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMINSB, pminsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pminsb_u128, iemAImpl_pminsb_u128_fallback));
}


/** Opcode 0x66 0x0f 0x38 0x39. */
FNIEMOP_DEF(iemOp_pminsd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMINSD, pminsd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pminsd_u128, iemAImpl_pminsd_u128_fallback));
}


/** Opcode 0x66 0x0f 0x38 0x3a. */
FNIEMOP_DEF(iemOp_pminuw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMINUW, pminuw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pminuw_u128, iemAImpl_pminuw_u128_fallback));
}


/** Opcode 0x66 0x0f 0x38 0x3b. */
FNIEMOP_DEF(iemOp_pminud_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMINUD, pminud, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pminud_u128, iemAImpl_pminud_u128_fallback));
}


/** Opcode 0x66 0x0f 0x38 0x3c. */
FNIEMOP_DEF(iemOp_pmaxsb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMAXSB, pmaxsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pmaxsb_u128, iemAImpl_pmaxsb_u128_fallback));
}


/** Opcode 0x66 0x0f 0x38 0x3d. */
FNIEMOP_DEF(iemOp_pmaxsd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMAXSD, pmaxsd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pmaxsd_u128, iemAImpl_pmaxsd_u128_fallback));
}


/** Opcode 0x66 0x0f 0x38 0x3e. */
FNIEMOP_DEF(iemOp_pmaxuw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMAXUW, pmaxuw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pmaxuw_u128, iemAImpl_pmaxuw_u128_fallback));
}


/** Opcode 0x66 0x0f 0x38 0x3f. */
FNIEMOP_DEF(iemOp_pmaxud_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMAXUD, pmaxud, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pmaxud_u128, iemAImpl_pmaxud_u128_fallback));
}


/** Opcode 0x66 0x0f 0x38 0x40. */
FNIEMOP_DEF(iemOp_pmulld_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULLD, pmulld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pmulld_u128, iemAImpl_pmulld_u128_fallback));
}


/** Opcode 0x66 0x0f 0x38 0x41. */
FNIEMOP_DEF(iemOp_phminposuw_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, PHMINPOSUW, phminposuw, Vdq, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse41Opt_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_phminposuw_u128, iemAImpl_phminposuw_u128_fallback));
}
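
/*
 * Illustrative sketch (not from the original file): PHMINPOSUW scans the
 * eight unsigned source words, stores the smallest value in word 0 of the
 * destination, its index in bits 18:16, and zeroes the rest.  The helper
 * name is hypothetical.
 */
#if 0 /* sketch only, not built */
static void iemSketchPhminposuwU128(PRTUINT128U puDst, PCRTUINT128U puSrc)
{
    unsigned iMin = 0;
    for (unsigned iWord = 1; iWord < 8; iWord++)
        if (puSrc->au16[iWord] < puSrc->au16[iMin])
            iMin = iWord;
    uint16_t const uMin = puSrc->au16[iMin];
    puDst->au64[1] = 0;
    puDst->au64[0] = ((uint64_t)iMin << 16) | uMin;
}
#endif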


/*  Opcode 0x66 0x0f 0x38 0x42 - invalid. */
/*  Opcode 0x66 0x0f 0x38 0x43 - invalid. */
/*  Opcode 0x66 0x0f 0x38 0x44 - invalid. */
/*  Opcode 0x66 0x0f 0x38 0x45 - invalid (vex only). */
/*  Opcode 0x66 0x0f 0x38 0x46 - invalid (vex only). */
/*  Opcode 0x66 0x0f 0x38 0x47 - invalid (vex only). */
/*  Opcode 0x66 0x0f 0x38 0x48 - invalid. */
/*  Opcode 0x66 0x0f 0x38 0x49 - invalid. */
/*  Opcode 0x66 0x0f 0x38 0x4a - invalid. */
/*  Opcode 0x66 0x0f 0x38 0x4b - invalid. */
/*  Opcode 0x66 0x0f 0x38 0x4c - invalid. */
/*  Opcode 0x66 0x0f 0x38 0x4d - invalid. */
/*  Opcode 0x66 0x0f 0x38 0x4e - invalid. */
/*  Opcode 0x66 0x0f 0x38 0x4f - invalid. */

/*  Opcode 0x66 0x0f 0x38 0x50 - invalid. */
/*  Opcode 0x66 0x0f 0x38 0x51 - invalid. */
/*  Opcode 0x66 0x0f 0x38 0x52 - invalid. */
/*  Opcode 0x66 0x0f 0x38 0x53 - invalid. */
/*  Opcode 0x66 0x0f 0x38 0x54 - invalid. */
/*  Opcode 0x66 0x0f 0x38 0x55 - invalid. */
/*  Opcode 0x66 0x0f 0x38 0x56 - invalid. */
/*  Opcode 0x66 0x0f 0x38 0x57 - invalid. */
/*  Opcode 0x66 0x0f 0x38 0x58 - invalid (vex only). */
/*  Opcode 0x66 0x0f 0x38 0x59 - invalid (vex only). */
/*  Opcode 0x66 0x0f 0x38 0x5a - invalid (vex only). */
/*  Opcode 0x66 0x0f 0x38 0x5b - invalid. */
/*  Opcode 0x66 0x0f 0x38 0x5c - invalid. */
/*  Opcode 0x66 0x0f 0x38 0x5d - invalid. */
/*  Opcode 0x66 0x0f 0x38 0x5e - invalid. */
/*  Opcode 0x66 0x0f 0x38 0x5f - invalid. */

/*  Opcode 0x66 0x0f 0x38 0x60 - invalid. */
/*  Opcode 0x66 0x0f 0x38 0x61 - invalid. */
/*  Opcode 0x66 0x0f 0x38 0x62 - invalid. */
/*  Opcode 0x66 0x0f 0x38 0x63 - invalid. */
/*  Opcode 0x66 0x0f 0x38 0x64 - invalid. */
/*  Opcode 0x66 0x0f 0x38 0x65 - invalid. */
/*  Opcode 0x66 0x0f 0x38 0x66 - invalid. */
/*  Opcode 0x66 0x0f 0x38 0x67 - invalid. */
/*  Opcode 0x66 0x0f 0x38 0x68 - invalid. */
/*  Opcode 0x66 0x0f 0x38 0x69 - invalid. */
/*  Opcode 0x66 0x0f 0x38 0x6a - invalid. */
/*  Opcode 0x66 0x0f 0x38 0x6b - invalid. */
/*  Opcode 0x66 0x0f 0x38 0x6c - invalid. */
/*  Opcode 0x66 0x0f 0x38 0x6d - invalid. */
/*  Opcode 0x66 0x0f 0x38 0x6e - invalid. */
/*  Opcode 0x66 0x0f 0x38 0x6f - invalid. */

/*  Opcode 0x66 0x0f 0x38 0x70 - invalid. */
/*  Opcode 0x66 0x0f 0x38 0x71 - invalid. */
/*  Opcode 0x66 0x0f 0x38 0x72 - invalid. */
/*  Opcode 0x66 0x0f 0x38 0x73 - invalid. */
/*  Opcode 0x66 0x0f 0x38 0x74 - invalid. */
/*  Opcode 0x66 0x0f 0x38 0x75 - invalid. */
/*  Opcode 0x66 0x0f 0x38 0x76 - invalid. */
/*  Opcode 0x66 0x0f 0x38 0x77 - invalid. */
/*  Opcode 0x66 0x0f 0x38 0x78 - invalid (vex only). */
/*  Opcode 0x66 0x0f 0x38 0x79 - invalid (vex only). */
/*  Opcode 0x66 0x0f 0x38 0x7a - invalid. */
/*  Opcode 0x66 0x0f 0x38 0x7b - invalid. */
/*  Opcode 0x66 0x0f 0x38 0x7c - invalid. */
/*  Opcode 0x66 0x0f 0x38 0x7d - invalid. */
/*  Opcode 0x66 0x0f 0x38 0x7e - invalid. */
/*  Opcode 0x66 0x0f 0x38 0x7f - invalid. */

/** Opcode 0x66 0x0f 0x38 0x80. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX_EPT
FNIEMOP_DEF(iemOp_invept_Gy_Mdq)
{
    IEMOP_MNEMONIC(invept, "invept Gy,Mdq");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /* Register, memory. */
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrInveptDesc, 1);
            IEM_MC_ARG(uint64_t, uInveptType, 2);
            IEM_MC_FETCH_GREG_U64(uInveptType, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrInveptDesc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEMOP_HLP_IN_VMX_OPERATION("invept", kVmxVDiag_Invept);
            IEMOP_HLP_VMX_INSTR("invept", kVmxVDiag_Invept);
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_invept, iEffSeg, GCPtrInveptDesc, uInveptType);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrInveptDesc, 1);
            IEM_MC_ARG(uint32_t, uInveptType, 2);
            IEM_MC_FETCH_GREG_U32(uInveptType, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrInveptDesc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEMOP_HLP_IN_VMX_OPERATION("invept", kVmxVDiag_Invept);
            IEMOP_HLP_VMX_INSTR("invept", kVmxVDiag_Invept);
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_invept, iEffSeg, GCPtrInveptDesc, uInveptType);
            IEM_MC_END();
        }
    }
    Log(("iemOp_invept_Gy_Mdq: invalid encoding -> #UD\n"));
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#else
FNIEMOP_STUB(iemOp_invept_Gy_Mdq);
#endif

/** Opcode 0x66 0x0f 0x38 0x81. */
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
FNIEMOP_DEF(iemOp_invvpid_Gy_Mdq)
{
    IEMOP_MNEMONIC(invvpid, "invvpid Gy,Mdq");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /* Register, memory. */
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrInvvpidDesc, 1);
            IEM_MC_ARG(uint64_t, uInvvpidType, 2);
            IEM_MC_FETCH_GREG_U64(uInvvpidType, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrInvvpidDesc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEMOP_HLP_IN_VMX_OPERATION("invvpid", kVmxVDiag_Invvpid);
            IEMOP_HLP_VMX_INSTR("invvpid", kVmxVDiag_Invvpid);
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_invvpid, iEffSeg, GCPtrInvvpidDesc, uInvvpidType);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrInvvpidDesc, 1);
            IEM_MC_ARG(uint32_t, uInvvpidType, 2);
            IEM_MC_FETCH_GREG_U32(uInvvpidType, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrInvvpidDesc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEMOP_HLP_IN_VMX_OPERATION("invvpid", kVmxVDiag_Invvpid);
            IEMOP_HLP_VMX_INSTR("invvpid", kVmxVDiag_Invvpid);
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_invvpid, iEffSeg, GCPtrInvvpidDesc, uInvvpidType);
            IEM_MC_END();
        }
    }
    Log(("iemOp_invvpid_Gy_Mdq: invalid encoding -> #UD\n"));
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
#else
FNIEMOP_STUB(iemOp_invvpid_Gy_Mdq);
#endif

/** Opcode 0x66 0x0f 0x38 0x82. */
FNIEMOP_DEF(iemOp_invpcid_Gy_Mdq)
{
    IEMOP_MNEMONIC(invpcid, "invpcid Gy,Mdq");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /* Register, memory. */
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrInvpcidDesc, 1);
            IEM_MC_ARG(uint64_t, uInvpcidType, 2);
            IEM_MC_FETCH_GREG_U64(uInvpcidType, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrInvpcidDesc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT, iemCImpl_invpcid, iEffSeg, GCPtrInvpcidDesc, uInvpcidType);
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrInvpcidDesc, 1);
            IEM_MC_ARG(uint32_t, uInvpcidType, 2);
            IEM_MC_FETCH_GREG_U32(uInvpcidType, IEM_GET_MODRM_REG(pVCpu, bRm));
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrInvpcidDesc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_VMEXIT, iemCImpl_invpcid, iEffSeg, GCPtrInvpcidDesc, uInvpcidType);
            IEM_MC_END();
        }
    }
    Log(("iemOp_invpcid_Gy_Mdq: invalid encoding -> #UD\n"));
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
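
/*
 * Illustrative sketch (not from the original file): the 128-bit descriptor
 * INVPCID reads from iEffSeg:GCPtrInvpcidDesc and the four architectural
 * invalidation types passed in uInvpcidType, per the Intel SDM.  The struct
 * and enum names are hypothetical; the real layout handling lives in
 * iemCImpl_invpcid.
 */
#if 0 /* sketch only, not built */
typedef struct INVPCIDDESCSKETCH
{
    uint64_t    uPcid     : 12;     /* Bits  11:0:  PCID to invalidate. */
    uint64_t    uReserved : 52;     /* Bits  63:12: Reserved, must be zero or #GP(0). */
    uint64_t    uLinearAddr;        /* Bits 127:64: Linear address, used by type 0 only. */
} INVPCIDDESCSKETCH;

enum
{
    kSketchInvpcidIndivAddr    = 0, /* Invalidate one linear address for the given PCID. */
    kSketchInvpcidSingleCtx    = 1, /* Invalidate all mappings for the given PCID. */
    kSketchInvpcidAllCtxGlobal = 2, /* Invalidate all mappings, including global pages. */
    kSketchInvpcidAllCtx       = 3  /* Invalidate all mappings, except global pages. */
};
#endif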
1446
1447
1448/* Opcode 0x66 0x0f 0x38 0x83 - invalid. */
1449/* Opcode 0x66 0x0f 0x38 0x84 - invalid. */
1450/* Opcode 0x66 0x0f 0x38 0x85 - invalid. */
1451/* Opcode 0x66 0x0f 0x38 0x86 - invalid. */
1452/* Opcode 0x66 0x0f 0x38 0x87 - invalid. */
1453/* Opcode 0x66 0x0f 0x38 0x88 - invalid. */
1454/* Opcode 0x66 0x0f 0x38 0x89 - invalid. */
1455/* Opcode 0x66 0x0f 0x38 0x8a - invalid. */
1456/* Opcode 0x66 0x0f 0x38 0x8b - invalid. */
1457/* Opcode 0x66 0x0f 0x38 0x8c - invalid (vex only). */
1458/* Opcode 0x66 0x0f 0x38 0x8d - invalid. */
1459/* Opcode 0x66 0x0f 0x38 0x8e - invalid (vex only). */
1460/* Opcode 0x66 0x0f 0x38 0x8f - invalid. */
1461
1462/* Opcode 0x66 0x0f 0x38 0x90 - invalid (vex only). */
1463/* Opcode 0x66 0x0f 0x38 0x91 - invalid (vex only). */
1464/* Opcode 0x66 0x0f 0x38 0x92 - invalid (vex only). */
1465/* Opcode 0x66 0x0f 0x38 0x93 - invalid (vex only). */
1466/* Opcode 0x66 0x0f 0x38 0x94 - invalid. */
1467/* Opcode 0x66 0x0f 0x38 0x95 - invalid. */
1468/* Opcode 0x66 0x0f 0x38 0x96 - invalid (vex only). */
1469/* Opcode 0x66 0x0f 0x38 0x97 - invalid (vex only). */
1470/* Opcode 0x66 0x0f 0x38 0x98 - invalid (vex only). */
1471/* Opcode 0x66 0x0f 0x38 0x99 - invalid (vex only). */
1472/* Opcode 0x66 0x0f 0x38 0x9a - invalid (vex only). */
1473/* Opcode 0x66 0x0f 0x38 0x9b - invalid (vex only). */
1474/* Opcode 0x66 0x0f 0x38 0x9c - invalid (vex only). */
1475/* Opcode 0x66 0x0f 0x38 0x9d - invalid (vex only). */
1476/* Opcode 0x66 0x0f 0x38 0x9e - invalid (vex only). */
1477/* Opcode 0x66 0x0f 0x38 0x9f - invalid (vex only). */
1478
1479/* Opcode 0x66 0x0f 0x38 0xa0 - invalid. */
1480/* Opcode 0x66 0x0f 0x38 0xa1 - invalid. */
1481/* Opcode 0x66 0x0f 0x38 0xa2 - invalid. */
1482/* Opcode 0x66 0x0f 0x38 0xa3 - invalid. */
1483/* Opcode 0x66 0x0f 0x38 0xa4 - invalid. */
1484/* Opcode 0x66 0x0f 0x38 0xa5 - invalid. */
1485/* Opcode 0x66 0x0f 0x38 0xa6 - invalid (vex only). */
1486/* Opcode 0x66 0x0f 0x38 0xa7 - invalid (vex only). */
1487/* Opcode 0x66 0x0f 0x38 0xa8 - invalid (vex only). */
1488/* Opcode 0x66 0x0f 0x38 0xa9 - invalid (vex only). */
1489/* Opcode 0x66 0x0f 0x38 0xaa - invalid (vex only). */
1490/* Opcode 0x66 0x0f 0x38 0xab - invalid (vex only). */
1491/* Opcode 0x66 0x0f 0x38 0xac - invalid (vex only). */
1492/* Opcode 0x66 0x0f 0x38 0xad - invalid (vex only). */
1493/* Opcode 0x66 0x0f 0x38 0xae - invalid (vex only). */
1494/* Opcode 0x66 0x0f 0x38 0xaf - invalid (vex only). */
1495
1496/* Opcode 0x66 0x0f 0x38 0xb0 - invalid. */
1497/* Opcode 0x66 0x0f 0x38 0xb1 - invalid. */
1498/* Opcode 0x66 0x0f 0x38 0xb2 - invalid. */
1499/* Opcode 0x66 0x0f 0x38 0xb3 - invalid. */
1500/* Opcode 0x66 0x0f 0x38 0xb4 - invalid. */
1501/* Opcode 0x66 0x0f 0x38 0xb5 - invalid. */
1502/* Opcode 0x66 0x0f 0x38 0xb6 - invalid (vex only). */
1503/* Opcode 0x66 0x0f 0x38 0xb7 - invalid (vex only). */
1504/* Opcode 0x66 0x0f 0x38 0xb8 - invalid (vex only). */
1505/* Opcode 0x66 0x0f 0x38 0xb9 - invalid (vex only). */
1506/* Opcode 0x66 0x0f 0x38 0xba - invalid (vex only). */
1507/* Opcode 0x66 0x0f 0x38 0xbb - invalid (vex only). */
1508/* Opcode 0x66 0x0f 0x38 0xbc - invalid (vex only). */
1509/* Opcode 0x66 0x0f 0x38 0xbd - invalid (vex only). */
1510/* Opcode 0x66 0x0f 0x38 0xbe - invalid (vex only). */
1511/* Opcode 0x66 0x0f 0x38 0xbf - invalid (vex only). */
1512
1513/* Opcode 0x0f 0x38 0xc0 - invalid. */
1514/* Opcode 0x66 0x0f 0x38 0xc0 - invalid. */
1515/* Opcode 0x0f 0x38 0xc1 - invalid. */
1516/* Opcode 0x66 0x0f 0x38 0xc1 - invalid. */
1517/* Opcode 0x0f 0x38 0xc2 - invalid. */
1518/* Opcode 0x66 0x0f 0x38 0xc2 - invalid. */
1519/* Opcode 0x0f 0x38 0xc3 - invalid. */
1520/* Opcode 0x66 0x0f 0x38 0xc3 - invalid. */
1521/* Opcode 0x0f 0x38 0xc4 - invalid. */
1522/* Opcode 0x66 0x0f 0x38 0xc4 - invalid. */
1523/* Opcode 0x0f 0x38 0xc5 - invalid. */
1524/* Opcode 0x66 0x0f 0x38 0xc5 - invalid. */
1525/* Opcode 0x0f 0x38 0xc6 - invalid. */
1526/* Opcode 0x66 0x0f 0x38 0xc6 - invalid. */
1527/* Opcode 0x0f 0x38 0xc7 - invalid. */
1528/* Opcode 0x66 0x0f 0x38 0xc7 - invalid. */
1529
1530
/** Opcode 0x0f 0x38 0xc8. */
FNIEMOP_DEF(iemOp_sha1nexte_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, SHA1NEXTE, sha1nexte, Vdq, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSha_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSha, iemAImpl_sha1nexte_u128, iemAImpl_sha1nexte_u128_fallback));
}


/* Opcode 0x66 0x0f 0x38 0xc8 - invalid. */


/** Opcode 0x0f 0x38 0xc9. */
FNIEMOP_DEF(iemOp_sha1msg1_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, SHA1MSG1, sha1msg1, Vdq, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSha_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSha, iemAImpl_sha1msg1_u128, iemAImpl_sha1msg1_u128_fallback));
}


/* Opcode 0x66 0x0f 0x38 0xc9 - invalid. */


/** Opcode 0x0f 0x38 0xca. */
FNIEMOP_DEF(iemOp_sha1msg2_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, SHA1MSG2, sha1msg2, Vdq, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSha_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSha, iemAImpl_sha1msg2_u128, iemAImpl_sha1msg2_u128_fallback));
}


/* Opcode 0x66 0x0f 0x38 0xca - invalid. */


/** Opcode 0x0f 0x38 0xcb. */
FNIEMOP_DEF(iemOp_sha256rnds2_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, SHA256RNDS2, sha256rnds2, Vdq, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES); /** @todo Actually RMI with implicit XMM0 */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(3, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSha);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_ARG(PCRTUINT128U, puXmm0, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puXmm0, 0);
        IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fSha, iemAImpl_sha256rnds2_u128, iemAImpl_sha256rnds2_u128_fallback),
                                 puDst, puSrc, puXmm0);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_ARG(PCRTUINT128U, puXmm0, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSha);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puXmm0, 0);
        IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fSha, iemAImpl_sha256rnds2_u128, iemAImpl_sha256rnds2_u128_fallback),
                                 puDst, puSrc, puXmm0);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

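/*
 * For reference, the two SHA-256 rounds that sha256rnds2 performs, written
 * out as a plain C sketch.  This is NOT the actual iemAImpl_sha256rnds2_u128
 * code; the function and helper names below are made up for illustration,
 * only RTUINT128U and ASMRotateRightU32 (iprt/asm.h) are real.  Per the
 * instruction's register convention: puDst holds {C,D,G,H}, puSrc holds
 * {A,B,E,F}, XMM0's low two dwords hold the pre-summed W+K message words,
 * and the updated {A,B,E,F} lands in puDst (so callers ping-pong the two
 * state registers between invocations).
 */
static uint32_t iemSketchSha256Ch(uint32_t x, uint32_t y, uint32_t z)  { return (x & y) ^ (~x & z); }
static uint32_t iemSketchSha256Maj(uint32_t x, uint32_t y, uint32_t z) { return (x & y) ^ (x & z) ^ (y & z); }
static uint32_t iemSketchSha256Sigma0(uint32_t x) { return ASMRotateRightU32(x, 2) ^ ASMRotateRightU32(x, 13) ^ ASMRotateRightU32(x, 22); }
static uint32_t iemSketchSha256Sigma1(uint32_t x) { return ASMRotateRightU32(x, 6) ^ ASMRotateRightU32(x, 11) ^ ASMRotateRightU32(x, 25); }

static void iemSketchSha256Rnds2(PRTUINT128U puDst, PCRTUINT128U puSrc, PCRTUINT128U puXmm0)
{
    uint32_t uA = puSrc->au32[3], uB = puSrc->au32[2], uE = puSrc->au32[1], uF = puSrc->au32[0];
    uint32_t uC = puDst->au32[3], uD = puDst->au32[2], uG = puDst->au32[1], uH = puDst->au32[0];
    for (unsigned i = 0; i < 2; i++)
    {
        /* Standard FIPS 180-4 round; WK_i = XMM0 dword i (W and K pre-added by the caller). */
        uint32_t const uT1 = uH + iemSketchSha256Sigma1(uE) + iemSketchSha256Ch(uE, uF, uG) + puXmm0->au32[i];
        uint32_t const uT2 = iemSketchSha256Sigma0(uA) + iemSketchSha256Maj(uA, uB, uC);
        uH = uG; uG = uF; uF = uE; uE = uD + uT1;
        uD = uC; uC = uB; uB = uA; uA = uT1 + uT2;
    }
    puDst->au32[3] = uA; puDst->au32[2] = uB; puDst->au32[1] = uE; puDst->au32[0] = uF;
}
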

/* Opcode 0x66 0x0f 0x38 0xcb - invalid. */


/** Opcode 0x0f 0x38 0xcc. */
FNIEMOP_DEF(iemOp_sha256msg1_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, SHA256MSG1, sha256msg1, Vdq, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSha_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSha, iemAImpl_sha256msg1_u128, iemAImpl_sha256msg1_u128_fallback));
}


/* Opcode 0x66 0x0f 0x38 0xcc - invalid. */


/** Opcode 0x0f 0x38 0xcd. */
FNIEMOP_DEF(iemOp_sha256msg2_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, SHA256MSG2, sha256msg2, Vdq, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSha_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSha, iemAImpl_sha256msg2_u128, iemAImpl_sha256msg2_u128_fallback));
}


/* Opcode 0x66 0x0f 0x38 0xcd - invalid. */
/* Opcode 0x0f 0x38 0xce - invalid. */
/* Opcode 0x66 0x0f 0x38 0xce - invalid. */
/* Opcode 0x0f 0x38 0xcf - invalid. */
/* Opcode 0x66 0x0f 0x38 0xcf - invalid. */

/* Opcode 0x66 0x0f 0x38 0xd0 - invalid. */
/* Opcode 0x66 0x0f 0x38 0xd1 - invalid. */
/* Opcode 0x66 0x0f 0x38 0xd2 - invalid. */
/* Opcode 0x66 0x0f 0x38 0xd3 - invalid. */
/* Opcode 0x66 0x0f 0x38 0xd4 - invalid. */
/* Opcode 0x66 0x0f 0x38 0xd5 - invalid. */
/* Opcode 0x66 0x0f 0x38 0xd6 - invalid. */
/* Opcode 0x66 0x0f 0x38 0xd7 - invalid. */
/* Opcode 0x66 0x0f 0x38 0xd8 - invalid. */
/* Opcode 0x66 0x0f 0x38 0xd9 - invalid. */
/* Opcode 0x66 0x0f 0x38 0xda - invalid. */


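/*
 * A quick semantics note on the AES-NI block below: aesenc performs one full
 * encryption round on the destination state (ShiftRows, SubBytes, MixColumns,
 * then XOR with the round key supplied by the source operand), aesenclast is
 * the same minus MixColumns, aesdec/aesdeclast are the inverse-cipher
 * counterparts using the Inv* transforms, and aesimc applies InvMixColumns to
 * convert an encryption round key for use with the equivalent inverse cipher.
 */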
/** Opcode 0x66 0x0f 0x38 0xdb. */
FNIEMOP_DEF(iemOp_aesimc_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, AESIMC, aesimc, Vdq, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonAesNi_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fAesNi, iemAImpl_aesimc_u128, iemAImpl_aesimc_u128_fallback));
}


/** Opcode 0x66 0x0f 0x38 0xdc. */
FNIEMOP_DEF(iemOp_aesenc_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, AESENC, aesenc, Vdq, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonAesNi_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fAesNi, iemAImpl_aesenc_u128, iemAImpl_aesenc_u128_fallback));
}


/** Opcode 0x66 0x0f 0x38 0xdd. */
FNIEMOP_DEF(iemOp_aesenclast_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, AESENCLAST, aesenclast, Vdq, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonAesNi_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fAesNi, iemAImpl_aesenclast_u128, iemAImpl_aesenclast_u128_fallback));
}


/** Opcode 0x66 0x0f 0x38 0xde. */
FNIEMOP_DEF(iemOp_aesdec_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, AESDEC, aesdec, Vdq, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonAesNi_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fAesNi, iemAImpl_aesdec_u128, iemAImpl_aesdec_u128_fallback));
}


/** Opcode 0x66 0x0f 0x38 0xdf. */
FNIEMOP_DEF(iemOp_aesdeclast_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, AESDECLAST, aesdeclast, Vdq, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonAesNi_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fAesNi, iemAImpl_aesdeclast_u128, iemAImpl_aesdeclast_u128_fallback));
}


/* Opcode 0x66 0x0f 0x38 0xe0 - invalid. */
/* Opcode 0x66 0x0f 0x38 0xe1 - invalid. */
/* Opcode 0x66 0x0f 0x38 0xe2 - invalid. */
/* Opcode 0x66 0x0f 0x38 0xe3 - invalid. */
/* Opcode 0x66 0x0f 0x38 0xe4 - invalid. */
/* Opcode 0x66 0x0f 0x38 0xe5 - invalid. */
/* Opcode 0x66 0x0f 0x38 0xe6 - invalid. */
/* Opcode 0x66 0x0f 0x38 0xe7 - invalid. */
/* Opcode 0x66 0x0f 0x38 0xe8 - invalid. */
/* Opcode 0x66 0x0f 0x38 0xe9 - invalid. */
/* Opcode 0x66 0x0f 0x38 0xea - invalid. */
/* Opcode 0x66 0x0f 0x38 0xeb - invalid. */
/* Opcode 0x66 0x0f 0x38 0xec - invalid. */
/* Opcode 0x66 0x0f 0x38 0xed - invalid. */
/* Opcode 0x66 0x0f 0x38 0xee - invalid. */
/* Opcode 0x66 0x0f 0x38 0xef - invalid. */


/** Opcode [0x66] 0x0f 0x38 0xf0. */
FNIEMOP_DEF(iemOp_movbe_Gv_Mv)
{
    IEMOP_MNEMONIC2(RM, MOVBE, movbe, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovBe)
        return iemOp_InvalidNeedRM(pVCpu);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (!IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, uSrc);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

                IEM_MC_BSWAP_LOCAL_U16(uSrc);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, uSrc);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

                IEM_MC_BSWAP_LOCAL_U32(uSrc);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

                IEM_MC_BSWAP_LOCAL_U64(uSrc);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Reg/reg not supported. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }
}

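/*
 * The movbe load above is just "fetch, then byte-swap"; a hedged C sketch of
 * the 32-bit path (the function name is illustrative, and RT_BSWAP_U32 is the
 * IPRT swapper that IEM_MC_BSWAP_LOCAL_U32 presumably boils down to):
 */
static uint32_t iemSketchMovBeLoadU32(uint32_t const *puSrc)
{
    /* Memory bytes 12 34 56 78 read as a little-endian dword give 0x78563412;
       movbe reverses them so the register ends up holding 0x12345678. */
    return RT_BSWAP_U32(*puSrc);
}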

/* Opcode 0xf3 0x0f 0x38 0xf0 - invalid. */


/** Opcode 0xf2 0x0f 0x38 0xf0. */
FNIEMOP_DEF(iemOp_crc32_Gd_Eb)
{
    IEMOP_MNEMONIC2(RM, CRC32, crc32, Gd, Eb, DISOPTYPE_HARMLESS, 0);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse42)
        return iemOp_InvalidNeedRM(pVCpu);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint32_t *, puDst, 0);
        IEM_MC_ARG(uint8_t, uSrc, 1);
        IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u8, iemAImpl_crc32_u8_fallback), puDst, uSrc);
        IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint32_t *, puDst, 0);
        IEM_MC_ARG(uint8_t, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u8, iemAImpl_crc32_u8_fallback), puDst, uSrc);
        IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

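/*
 * Reference model for a single crc32 byte step (a sketch, not the real
 * iemAImpl_crc32_u8): the instruction accumulates a reflected CRC-32C,
 * i.e. the Castagnoli polynomial 0x11EDC6F41 processed LSB first with the
 * constant 0x82F63B78, and performs no pre- or post-inversion itself; the
 * wider u16/u32/u64 forms are equivalent to feeding the source bytes through
 * this step least-significant byte first.
 */
static uint32_t iemSketchCrc32CByte(uint32_t uCrc, uint8_t bData)
{
    uCrc ^= bData;
    for (unsigned iBit = 0; iBit < 8; iBit++)
        uCrc = (uCrc >> 1) ^ (UINT32_C(0x82F63B78) & (0U - (uCrc & 1))); /* conditional XOR via 0/all-ones mask */
    return uCrc;
}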

/** Opcode [0x66] 0x0f 0x38 0xf1. */
FNIEMOP_DEF(iemOp_movbe_Mv_Gv)
{
    IEMOP_MNEMONIC2(MR, MOVBE, movbe, Ev, Gv, DISOPTYPE_HARMLESS, 0);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovBe)
        return iemOp_InvalidNeedRM(pVCpu);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (!IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Memory, register.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_BSWAP_LOCAL_U16(u16Value);
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_BSWAP_LOCAL_U32(u32Value);
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_BSWAP_LOCAL_U64(u64Value);
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Reg/reg not supported. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }
}


/* Opcode 0xf3 0x0f 0x38 0xf1 - invalid. */


/** Opcode 0xf2 0x0f 0x38 0xf1. */
FNIEMOP_DEF(iemOp_crc32_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, CRC32, crc32, Gd, Ev, DISOPTYPE_HARMLESS, 0);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse42)
        return iemOp_InvalidNeedRM(pVCpu);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, puDst, 0);
                IEM_MC_ARG(uint16_t, uSrc, 1);
                IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u16, iemAImpl_crc32_u16_fallback),
                                         puDst, uSrc);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, puDst, 0);
                IEM_MC_ARG(uint32_t, uSrc, 1);
                IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u32, iemAImpl_crc32_u32_fallback),
                                         puDst, uSrc);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, puDst, 0);
                IEM_MC_ARG(uint64_t, uSrc, 1);
                IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u64, iemAImpl_crc32_u64_fallback),
                                         puDst, uSrc);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * Register, memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 1);
                IEM_MC_ARG(uint32_t *, puDst, 0);
                IEM_MC_ARG(uint16_t, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u16, iemAImpl_crc32_u16_fallback),
                                         puDst, uSrc);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 1);
                IEM_MC_ARG(uint32_t *, puDst, 0);
                IEM_MC_ARG(uint32_t, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u32, iemAImpl_crc32_u32_fallback),
                                         puDst, uSrc);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 1);
                IEM_MC_ARG(uint32_t *, puDst, 0);
                IEM_MC_ARG(uint64_t, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u64, iemAImpl_crc32_u64_fallback),
                                         puDst, uSrc);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}


/* Opcode 0x0f 0x38 0xf2 - invalid (vex only). */
/* Opcode 0x66 0x0f 0x38 0xf2 - invalid. */
/* Opcode 0xf3 0x0f 0x38 0xf2 - invalid. */
/* Opcode 0xf2 0x0f 0x38 0xf2 - invalid. */

/* Opcode 0x0f 0x38 0xf3 - invalid (vex only - group 17). */
/* Opcode 0x66 0x0f 0x38 0xf3 - invalid (vex only - group 17). */
/* Opcode 0xf3 0x0f 0x38 0xf3 - invalid (vex only - group 17). */
/* Opcode 0xf2 0x0f 0x38 0xf3 - invalid (vex only - group 17). */

/* Opcode 0x0f 0x38 0xf4 - invalid. */
/* Opcode 0x66 0x0f 0x38 0xf4 - invalid. */
/* Opcode 0xf3 0x0f 0x38 0xf4 - invalid. */
/* Opcode 0xf2 0x0f 0x38 0xf4 - invalid. */

/* Opcode 0x0f 0x38 0xf5 - invalid (vex only). */
/* Opcode 0x66 0x0f 0x38 0xf5 - invalid. */
/* Opcode 0xf3 0x0f 0x38 0xf5 - invalid (vex only). */
/* Opcode 0xf2 0x0f 0x38 0xf5 - invalid (vex only). */

/* Opcode 0x0f 0x38 0xf6 - invalid. */

#define ADX_EMIT(a_Variant) \
    do \
    { \
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAdx) \
            return iemOp_InvalidNeedRM(pVCpu); \
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
        { \
            if (IEM_IS_MODRM_REG_MODE(bRm)) \
            { \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint32_t *, pEFlags, 1); \
                IEM_MC_ARG(uint64_t, u64Src, 2); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAdx, iemAImpl_## a_Variant ##_u64, iemAImpl_## a_Variant ##_u64_fallback), \
                                         pu64Dst, pEFlags, u64Src); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
            } \
            else \
            { \
                IEM_MC_BEGIN(3, 1); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint32_t *, pEFlags, 1); \
                IEM_MC_ARG(uint64_t, u64Src, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAdx, iemAImpl_## a_Variant ##_u64, iemAImpl_## a_Variant ##_u64_fallback), \
                                         pu64Dst, pEFlags, u64Src); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
            } \
        } \
        else \
        { \
            if (IEM_IS_MODRM_REG_MODE(bRm)) \
            { \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t *, pEFlags, 1); \
                IEM_MC_ARG(uint32_t, u32Src, 2); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAdx, iemAImpl_## a_Variant ##_u32, iemAImpl_## a_Variant ##_u32_fallback), \
                                         pu32Dst, pEFlags, u32Src); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
            } \
            else \
            { \
                IEM_MC_BEGIN(3, 1); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t *, pEFlags, 1); \
                IEM_MC_ARG(uint32_t, u32Src, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fAdx, iemAImpl_## a_Variant ##_u32, iemAImpl_## a_Variant ##_u32_fallback), \
                                         pu32Dst, pEFlags, u32Src); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
            } \
        } \
    } while (0)

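/*
 * Flag behaviour the two ADX_EMIT users below implement, as a hedged C sketch
 * (the function name is made up; X86_EFL_CF is the real iprt/x86.h bit):
 * adcx is an add-with-carry that reads and writes CF only, adox does the same
 * through OF only, and neither touches any other status flag, which is what
 * lets big-number code interleave two independent carry chains.
 */
static void iemSketchAdcxU64(uint64_t *puDst, uint32_t *pfEFlags, uint64_t uSrc)
{
    uint64_t const fCarryIn = *pfEFlags & X86_EFL_CF; /* CF is bit 0. */
    uint64_t const uResult  = *puDst + uSrc + fCarryIn;
    if (uResult < uSrc || (fCarryIn && uResult == uSrc))
        *pfEFlags |= X86_EFL_CF;        /* unsigned overflow -> CF=1 */
    else
        *pfEFlags &= ~X86_EFL_CF;       /* CF=0; OF/SF/ZF/AF/PF stay put */
    *puDst = uResult;
}
/* adox is the same with X86_EFL_OF (bit 11) substituted for X86_EFL_CF. */
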
/** Opcode 0x66 0x0f 0x38 0xf6. */
FNIEMOP_DEF(iemOp_adcx_Gy_Ey)
{
    IEMOP_MNEMONIC2(RM, ADCX, adcx, Gy, Ey, DISOPTYPE_HARMLESS, 0);
    ADX_EMIT(adcx);
}


/** Opcode 0xf3 0x0f 0x38 0xf6. */
FNIEMOP_DEF(iemOp_adox_Gy_Ey)
{
    IEMOP_MNEMONIC2(RM, ADOX, adox, Gy, Ey, DISOPTYPE_HARMLESS, 0);
    ADX_EMIT(adox);
}


/* Opcode 0xf2 0x0f 0x38 0xf6 - invalid (vex only). */

/* Opcode 0x0f 0x38 0xf7 - invalid (vex only). */
/* Opcode 0x66 0x0f 0x38 0xf7 - invalid (vex only). */
/* Opcode 0xf3 0x0f 0x38 0xf7 - invalid (vex only). */
/* Opcode 0xf2 0x0f 0x38 0xf7 - invalid (vex only). */

/* Opcode 0x0f 0x38 0xf8 - invalid. */
/* Opcode 0x66 0x0f 0x38 0xf8 - invalid. */
/* Opcode 0xf3 0x0f 0x38 0xf8 - invalid. */
/* Opcode 0xf2 0x0f 0x38 0xf8 - invalid. */

/* Opcode 0x0f 0x38 0xf9 - invalid. */
/* Opcode 0x66 0x0f 0x38 0xf9 - invalid. */
/* Opcode 0xf3 0x0f 0x38 0xf9 - invalid. */
/* Opcode 0xf2 0x0f 0x38 0xf9 - invalid. */

/* Opcode 0x0f 0x38 0xfa - invalid. */
/* Opcode 0x66 0x0f 0x38 0xfa - invalid. */
/* Opcode 0xf3 0x0f 0x38 0xfa - invalid. */
/* Opcode 0xf2 0x0f 0x38 0xfa - invalid. */

/* Opcode 0x0f 0x38 0xfb - invalid. */
/* Opcode 0x66 0x0f 0x38 0xfb - invalid. */
/* Opcode 0xf3 0x0f 0x38 0xfb - invalid. */
/* Opcode 0xf2 0x0f 0x38 0xfb - invalid. */

/* Opcode 0x0f 0x38 0xfc - invalid. */
/* Opcode 0x66 0x0f 0x38 0xfc - invalid. */
/* Opcode 0xf3 0x0f 0x38 0xfc - invalid. */
/* Opcode 0xf2 0x0f 0x38 0xfc - invalid. */

/* Opcode 0x0f 0x38 0xfd - invalid. */
/* Opcode 0x66 0x0f 0x38 0xfd - invalid. */
/* Opcode 0xf3 0x0f 0x38 0xfd - invalid. */
/* Opcode 0xf2 0x0f 0x38 0xfd - invalid. */

/* Opcode 0x0f 0x38 0xfe - invalid. */
/* Opcode 0x66 0x0f 0x38 0xfe - invalid. */
/* Opcode 0xf3 0x0f 0x38 0xfe - invalid. */
/* Opcode 0xf2 0x0f 0x38 0xfe - invalid. */

/* Opcode 0x0f 0x38 0xff - invalid. */
/* Opcode 0x66 0x0f 0x38 0xff - invalid. */
/* Opcode 0xf3 0x0f 0x38 0xff - invalid. */
/* Opcode 0xf2 0x0f 0x38 0xff - invalid. */


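/*
 * Layout note on the map below: each opcode byte owns four consecutive slots,
 * one per mandatory-prefix column (none, 0x66, 0xF3, 0xF2) - hence the
 * IEMOP_X4() shorthand for fully invalid rows and the AssertCompile() of
 * 4 * 256 = 1024 entries at the end.  The decoder is assumed to index it as
 * g_apfnThreeByte0f38[bOpcode * 4 + idxPrefixColumn].
 */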
/**
 * Three byte opcode map, first two bytes are 0x0f 0x38.
 * @sa g_apfnVexMap2
 */
IEM_STATIC const PFNIEMOP g_apfnThreeByte0f38[] =
{
    /*          no prefix,                  066h prefix                 f3h prefix,                 f2h prefix */
    /* 0x00 */  iemOp_pshufb_Pq_Qq,         iemOp_pshufb_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x01 */  iemOp_phaddw_Pq_Qq,         iemOp_phaddw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x02 */  iemOp_phaddd_Pq_Qq,         iemOp_phaddd_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x03 */  iemOp_phaddsw_Pq_Qq,        iemOp_phaddsw_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x04 */  iemOp_pmaddubsw_Pq_Qq,      iemOp_pmaddubsw_Vx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x05 */  iemOp_phsubw_Pq_Qq,         iemOp_phsubw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x06 */  iemOp_phsubd_Pq_Qq,         iemOp_phsubd_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x07 */  iemOp_phsubsw_Pq_Qq,        iemOp_phsubsw_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x08 */  iemOp_psignb_Pq_Qq,         iemOp_psignb_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x09 */  iemOp_psignw_Pq_Qq,         iemOp_psignw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x0a */  iemOp_psignd_Pq_Qq,         iemOp_psignd_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x0b */  iemOp_pmulhrsw_Pq_Qq,       iemOp_pmulhrsw_Vx_Wx,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x0c */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0d */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0e */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0f */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x10 */  iemOp_InvalidNeedRM,        iemOp_pblendvb_Vdq_Wdq,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x11 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x12 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x13 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x14 */  iemOp_InvalidNeedRM,        iemOp_blendvps_Vdq_Wdq,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x15 */  iemOp_InvalidNeedRM,        iemOp_blendvpd_Vdq_Wdq,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x16 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x17 */  iemOp_InvalidNeedRM,        iemOp_ptest_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x18 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x19 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1a */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1b */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1c */  iemOp_pabsb_Pq_Qq,          iemOp_pabsb_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x1d */  iemOp_pabsw_Pq_Qq,          iemOp_pabsw_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x1e */  iemOp_pabsd_Pq_Qq,          iemOp_pabsd_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x1f */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x20 */  iemOp_InvalidNeedRM,        iemOp_pmovsxbw_Vx_UxMq,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x21 */  iemOp_InvalidNeedRM,        iemOp_pmovsxbd_Vx_UxMd,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x22 */  iemOp_InvalidNeedRM,        iemOp_pmovsxbq_Vx_UxMw,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x23 */  iemOp_InvalidNeedRM,        iemOp_pmovsxwd_Vx_UxMq,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x24 */  iemOp_InvalidNeedRM,        iemOp_pmovsxwq_Vx_UxMd,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x25 */  iemOp_InvalidNeedRM,        iemOp_pmovsxdq_Vx_UxMq,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x26 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x27 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x28 */  iemOp_InvalidNeedRM,        iemOp_pmuldq_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x29 */  iemOp_InvalidNeedRM,        iemOp_pcmpeqq_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x2a */  iemOp_InvalidNeedRM,        iemOp_movntdqa_Vdq_Mdq,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x2b */  iemOp_InvalidNeedRM,        iemOp_packusdw_Vx_Wx,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x2c */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x2d */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x2e */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x2f */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x30 */  iemOp_InvalidNeedRM,        iemOp_pmovzxbw_Vx_UxMq,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x31 */  iemOp_InvalidNeedRM,        iemOp_pmovzxbd_Vx_UxMd,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x32 */  iemOp_InvalidNeedRM,        iemOp_pmovzxbq_Vx_UxMw,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x33 */  iemOp_InvalidNeedRM,        iemOp_pmovzxwd_Vx_UxMq,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x34 */  iemOp_InvalidNeedRM,        iemOp_pmovzxwq_Vx_UxMd,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x35 */  iemOp_InvalidNeedRM,        iemOp_pmovzxdq_Vx_UxMq,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x36 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x37 */  iemOp_InvalidNeedRM,        iemOp_pcmpgtq_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x38 */  iemOp_InvalidNeedRM,        iemOp_pminsb_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x39 */  iemOp_InvalidNeedRM,        iemOp_pminsd_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x3a */  iemOp_InvalidNeedRM,        iemOp_pminuw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x3b */  iemOp_InvalidNeedRM,        iemOp_pminud_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x3c */  iemOp_InvalidNeedRM,        iemOp_pmaxsb_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x3d */  iemOp_InvalidNeedRM,        iemOp_pmaxsd_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x3e */  iemOp_InvalidNeedRM,        iemOp_pmaxuw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x3f */  iemOp_InvalidNeedRM,        iemOp_pmaxud_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,

    /* 0x40 */  iemOp_InvalidNeedRM,        iemOp_pmulld_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x41 */  iemOp_InvalidNeedRM,        iemOp_phminposuw_Vdq_Wdq,   iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x42 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x43 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x44 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x45 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x46 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x47 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x48 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x49 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4a */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4b */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4c */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4d */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4e */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4f */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x50 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x51 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x52 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x53 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x54 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x55 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x56 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x57 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x58 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x59 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x5a */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x5b */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x5c */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x5d */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x5e */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x5f */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x60 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x61 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x62 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x63 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x64 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x65 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x66 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x67 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x68 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x69 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x6a */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x6b */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x6c */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x6d */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x6e */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x6f */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x70 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x71 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x72 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x73 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x74 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x75 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x76 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x77 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x78 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x79 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7a */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7b */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7c */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7d */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7e */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7f */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x80 */  iemOp_InvalidNeedRM,        iemOp_invept_Gy_Mdq,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x81 */  iemOp_InvalidNeedRM,        iemOp_invvpid_Gy_Mdq,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x82 */  iemOp_InvalidNeedRM,        iemOp_invpcid_Gy_Mdq,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x83 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x84 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x85 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x86 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x87 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x88 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x89 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8a */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8b */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8c */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8d */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8e */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8f */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x90 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x91 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x92 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x93 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x94 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x95 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x96 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x97 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x98 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x99 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9a */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9b */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9c */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9d */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9e */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9f */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xa0 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa1 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa2 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa3 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa4 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa5 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa6 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa9 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xaa */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xab */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xac */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xad */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xae */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xaf */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xb0 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb1 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb2 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb3 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb4 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb5 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb6 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb7 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb8 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb9 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xba */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbb */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbc */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbd */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbe */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbf */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xc0 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc1 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc2 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc3 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc4 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc5 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc6 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc7 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc8 */  iemOp_sha1nexte_Vdq_Wdq,    iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xc9 */  iemOp_sha1msg1_Vdq_Wdq,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xca */  iemOp_sha1msg2_Vdq_Wdq,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xcb */  iemOp_sha256rnds2_Vdq_Wdq,  iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xcc */  iemOp_sha256msg1_Vdq_Wdq,   iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xcd */  iemOp_sha256msg2_Vdq_Wdq,   iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xce */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcf */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xd0 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd1 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd2 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd3 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd4 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd5 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd6 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd7 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd8 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd9 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xda */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xdb */  iemOp_InvalidNeedRM,        iemOp_aesimc_Vdq_Wdq,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xdc */  iemOp_InvalidNeedRM,        iemOp_aesenc_Vdq_Wdq,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xdd */  iemOp_InvalidNeedRM,        iemOp_aesenclast_Vdq_Wdq,   iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xde */  iemOp_InvalidNeedRM,        iemOp_aesdec_Vdq_Wdq,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xdf */  iemOp_InvalidNeedRM,        iemOp_aesdeclast_Vdq_Wdq,   iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,

    /* 0xe0 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe1 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe2 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe3 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe4 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe5 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe6 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe7 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe8 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe9 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xea */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xeb */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xec */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xed */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xee */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xef */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xf0 */  iemOp_movbe_Gv_Mv,          iemOp_movbe_Gv_Mv,          iemOp_InvalidNeedRM,        iemOp_crc32_Gd_Eb,
    /* 0xf1 */  iemOp_movbe_Mv_Gv,          iemOp_movbe_Mv_Gv,          iemOp_InvalidNeedRM,        iemOp_crc32_Gv_Ev,
    /* 0xf2 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xf3 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xf4 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xf5 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xf6 */  iemOp_InvalidNeedRM,        iemOp_adcx_Gy_Ey,           iemOp_adox_Gy_Ey,           iemOp_InvalidNeedRM,
    /* 0xf7 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xf8 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xf9 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xfa */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xfb */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xfc */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xfd */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xfe */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xff */  IEMOP_X4(iemOp_InvalidNeedRM),
};
AssertCompile(RT_ELEMENTS(g_apfnThreeByte0f38) == 1024);

/** @} */
