VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsThree0f38.cpp.h@ 97698

Last change on this file since 97698 was 97543, checked in by vboxsync, 2 years ago

IEM: Modified microcode XMM accessors to fetch specified qword/dword etc.; added PEXTRD instruction.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 87.6 KB
Line 
1/* $Id: IEMAllInstructionsThree0f38.cpp.h 97543 2022-11-15 12:59:28Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 *
5 * @remarks IEMAllInstructionsVexMap2.cpp.h is a VEX mirror of this file.
6 * Any update here is likely needed in that file too.
7 */
8
9/*
10 * Copyright (C) 2011-2022 Oracle and/or its affiliates.
11 *
12 * This file is part of VirtualBox base platform packages, as
13 * available from https://www.virtualbox.org.
14 *
15 * This program is free software; you can redistribute it and/or
16 * modify it under the terms of the GNU General Public License
17 * as published by the Free Software Foundation, in version 3 of the
18 * License.
19 *
20 * This program is distributed in the hope that it will be useful, but
21 * WITHOUT ANY WARRANTY; without even the implied warranty of
22 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
23 * General Public License for more details.
24 *
25 * You should have received a copy of the GNU General Public License
26 * along with this program; if not, see <https://www.gnu.org/licenses>.
27 *
28 * SPDX-License-Identifier: GPL-3.0-only
29 */
30
31
32/** @name Three byte opcodes with first two bytes 0x0f 0x38
33 * @{
34 */
35
/** Forward declaration of the common MMX worker; the @a fSupported flag is the
 *  guest CPU feature bit gating the instruction (presumably \#UD when false —
 *  see the worker's definition). */
FNIEMOP_DEF_2(iemOpCommonMmx_FullFull_To_Full_Ex, PFNIEMAIMPLMEDIAF2U64, pfnU64, bool, fSupported); /* in IEMAllInstructionsTwoByte0f.cpp.h */
37
38
/**
 * Common worker for SSSE3 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSSE3 cpuid checks.
 *
 * @param   pfnU128     The 128-bit media worker to invoke (native or C
 *                      fallback, selected by the caller via
 *                      IEM_SELECT_HOST_OR_FALLBACK).
 * @sa iemOpCommonSse2_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSsse3_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSSE3_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        /* SSE AIMPL call: the worker also receives FXSAVE state (contrast
           with the CALL_VOID variants used by the 'Opt' workers below). */
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Finish decoding (effective address first), then check CPUID
           related exceptions before touching guest memory. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSSE3_RELATED_XCPT();
        /* 16-byte alignment of the memory operand is enforced here. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
92
93
/**
 * Common worker for SSE4.1 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE4.1 cpuid checks.
 *
 * @param   pfnU128     The 128-bit media worker to invoke (native or C
 *                      fallback); it also receives FXSAVE state via
 *                      IEM_MC_CALL_SSE_AIMPL_2.
 * @sa  iemOpCommonSse2_FullFull_To_Full, iemOpCommonSsse3_FullFull_To_Full,
 *      iemOpCommonSse42_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse41_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Decode the effective address, finish decoding, then raise any
           CPUID-related exceptions before the (aligned) memory fetch. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
148
149
/**
 * Common worker for SSE4.1 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE4.1 cpuid checks.
 *
 * Unlike iemOpCommonSse41_FullFull_To_Full, the @a pfnU128 worker function
 * takes no FXSAVE state, just the operands.
 *
 * @param   pfnU128     Operand-only worker (PFNIEMAIMPLMEDIAOPTF2U128),
 *                      invoked via IEM_MC_CALL_VOID_AIMPL_2.
 * @sa  iemOpCommonSse2_FullFull_To_Full, iemOpCommonSsse3_FullFull_To_Full,
 *      iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse42_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse41Opt_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        /* VOID call: no FXSAVE state is passed to the worker. */
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Decode, check exceptions, then do the alignment-checked fetch. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
207
208
/**
 * Common worker for SSE4.2 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE4.2 cpuid checks.
 *
 * @param   pfnU128     The 128-bit media worker to invoke (native or C
 *                      fallback); receives FXSAVE state as well.
 * @sa  iemOpCommonSse2_FullFull_To_Full, iemOpCommonSsse3_FullFull_To_Full,
 *      iemOpCommonSse41_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse42_FullFull_To_Full, PFNIEMAIMPLMEDIAF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_SSE42_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Decode, check exceptions, then do the alignment-checked fetch. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE42_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_SSE_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
263
264
/**
 * Common worker for SSE-style AES-NI instructions of the form:
 *      aesxxx  xmm1, xmm2/mem128
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. AES-NI cpuid checks.
 *
 * Unlike iemOpCommonSse41_FullFull_To_Full, the @a pfnU128 worker function
 * takes no FXSAVE state, just the operands.
 *
 * @param   pfnU128     Operand-only worker (PFNIEMAIMPLMEDIAOPTF2U128),
 *                      invoked via IEM_MC_CALL_VOID_AIMPL_2.
 * @sa  iemOpCommonSse2_FullFull_To_Full, iemOpCommonSsse3_FullFull_To_Full,
 *      iemOpCommonSse41_FullFull_To_Full, iemOpCommonSse42_FullFull_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonAesNi_FullFull_To_Full, PFNIEMAIMPLMEDIAOPTF2U128, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_MAYBE_RAISE_AESNI_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Decode, check exceptions, then do the alignment-checked fetch. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_AESNI_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(pfnU128, puDst, puSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
322
323
/** Opcode 0x0f 0x38 0x00. */
FNIEMOP_DEF(iemOp_pshufb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSHUFB, pshufb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* MMX form: select native/fallback worker; last arg gates on guest SSSE3. */
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pshufb_u64, &iemAImpl_pshufb_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
332
333
/** Opcode 0x66 0x0f 0x38 0x00. */
FNIEMOP_DEF(iemOp_pshufb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSHUFB, pshufb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* SSE form: defer to the common SSSE3 worker. */
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pshufb_u128, iemAImpl_pshufb_u128_fallback));
}
342
343
/** Opcode 0x0f 0x38 0x01. */
FNIEMOP_DEF(iemOp_phaddw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PHADDW, phaddw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* MMX form: select native/fallback worker; last arg gates on guest SSSE3. */
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phaddw_u64, &iemAImpl_phaddw_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
352
353
/** Opcode 0x66 0x0f 0x38 0x01. */
FNIEMOP_DEF(iemOp_phaddw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PHADDW, phaddw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* SSE form: defer to the common SSSE3 worker. */
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phaddw_u128, iemAImpl_phaddw_u128_fallback));
}
362
363
/** Opcode 0x0f 0x38 0x02. */
FNIEMOP_DEF(iemOp_phaddd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PHADDD, phaddd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* MMX form: select native/fallback worker; last arg gates on guest SSSE3. */
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phaddd_u64, &iemAImpl_phaddd_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
372
373
/** Opcode 0x66 0x0f 0x38 0x02. */
FNIEMOP_DEF(iemOp_phaddd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PHADDD, phaddd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* SSE form: defer to the common SSSE3 worker. */
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phaddd_u128, iemAImpl_phaddd_u128_fallback));
}
382
383
/** Opcode 0x0f 0x38 0x03. */
FNIEMOP_DEF(iemOp_phaddsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PHADDSW, phaddsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* MMX form: select native/fallback worker; last arg gates on guest SSSE3. */
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phaddsw_u64, &iemAImpl_phaddsw_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
392
393
/** Opcode 0x66 0x0f 0x38 0x03. */
FNIEMOP_DEF(iemOp_phaddsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PHADDSW, phaddsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* SSE form: defer to the common SSSE3 worker. */
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phaddsw_u128, iemAImpl_phaddsw_u128_fallback));
}
402
403
/** Opcode 0x0f 0x38 0x04. */
FNIEMOP_DEF(iemOp_pmaddubsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMADDUBSW, pmaddubsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* MMX form: select native/fallback worker; last arg gates on guest SSSE3. */
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pmaddubsw_u64, &iemAImpl_pmaddubsw_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
412
413
/** Opcode 0x66 0x0f 0x38 0x04. */
FNIEMOP_DEF(iemOp_pmaddubsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMADDUBSW, pmaddubsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* SSE form: defer to the common SSSE3 worker. */
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pmaddubsw_u128, iemAImpl_pmaddubsw_u128_fallback));
}
422
423
/** Opcode 0x0f 0x38 0x05. */
FNIEMOP_DEF(iemOp_phsubw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PHSUBW, phsubw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* MMX form: select native/fallback worker; last arg gates on guest SSSE3. */
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phsubw_u64, &iemAImpl_phsubw_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
432
433
/** Opcode 0x66 0x0f 0x38 0x05. */
FNIEMOP_DEF(iemOp_phsubw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PHSUBW, phsubw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* SSE form: defer to the common SSSE3 worker. */
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phsubw_u128, iemAImpl_phsubw_u128_fallback));
}
442
443
/** Opcode 0x0f 0x38 0x06. */
FNIEMOP_DEF(iemOp_phsubd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PHSUBD, phsubd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* MMX form: select native/fallback worker; last arg gates on guest SSSE3. */
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phsubd_u64, &iemAImpl_phsubd_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
452
453
454
/** Opcode 0x66 0x0f 0x38 0x06. */
FNIEMOP_DEF(iemOp_phsubd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PHSUBD, phsubd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* SSE form: defer to the common SSSE3 worker. */
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phsubd_u128, iemAImpl_phsubd_u128_fallback));
}
463
464
/** Opcode 0x0f 0x38 0x07. */
FNIEMOP_DEF(iemOp_phsubsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PHSUBSW, phsubsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* MMX form: select native/fallback worker; last arg gates on guest SSSE3. */
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phsubsw_u64, &iemAImpl_phsubsw_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
473
474
/** Opcode 0x66 0x0f 0x38 0x07. */
FNIEMOP_DEF(iemOp_phsubsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PHSUBSW, phsubsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* SSE form: defer to the common SSSE3 worker. */
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_phsubsw_u128, iemAImpl_phsubsw_u128_fallback));
}
483
484
/** Opcode 0x0f 0x38 0x08. */
FNIEMOP_DEF(iemOp_psignb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSIGNB, psignb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* MMX form: select native/fallback worker; last arg gates on guest SSSE3. */
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_psignb_u64, &iemAImpl_psignb_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
493
494
/** Opcode 0x66 0x0f 0x38 0x08. */
FNIEMOP_DEF(iemOp_psignb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSIGNB, psignb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* SSE form: defer to the common SSSE3 worker. */
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_psignb_u128, iemAImpl_psignb_u128_fallback));
}
503
504
/** Opcode 0x0f 0x38 0x09. */
FNIEMOP_DEF(iemOp_psignw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSIGNW, psignw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* MMX form: select native/fallback worker; last arg gates on guest SSSE3. */
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_psignw_u64, &iemAImpl_psignw_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
513
514
/** Opcode 0x66 0x0f 0x38 0x09. */
FNIEMOP_DEF(iemOp_psignw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSIGNW, psignw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* SSE form: defer to the common SSSE3 worker. */
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_psignw_u128, iemAImpl_psignw_u128_fallback));
}
523
524
/** Opcode 0x0f 0x38 0x0a. */
FNIEMOP_DEF(iemOp_psignd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PSIGND, psignd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* MMX form: select native/fallback worker; last arg gates on guest SSSE3. */
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_psignd_u64, &iemAImpl_psignd_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
533
534
/** Opcode 0x66 0x0f 0x38 0x0a. */
FNIEMOP_DEF(iemOp_psignd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PSIGND, psignd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* SSE form: defer to the common SSSE3 worker. */
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_psignd_u128, iemAImpl_psignd_u128_fallback));
}
543
544
/** Opcode 0x0f 0x38 0x0b. */
FNIEMOP_DEF(iemOp_pmulhrsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PMULHRSW, pmulhrsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* MMX form: select native/fallback worker; last arg gates on guest SSSE3. */
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pmulhrsw_u64, &iemAImpl_pmulhrsw_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
553
554
/** Opcode 0x66 0x0f 0x38 0x0b. */
FNIEMOP_DEF(iemOp_pmulhrsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULHRSW, pmulhrsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* SSE form: defer to the common SSSE3 worker. */
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pmulhrsw_u128, iemAImpl_pmulhrsw_u128_fallback));
}
563
564
565/* Opcode 0x0f 0x38 0x0c - invalid. */
566/* Opcode 0x66 0x0f 0x38 0x0c - invalid (vex only). */
567/* Opcode 0x0f 0x38 0x0d - invalid. */
568/* Opcode 0x66 0x0f 0x38 0x0d - invalid (vex only). */
569/* Opcode 0x0f 0x38 0x0e - invalid. */
570/* Opcode 0x66 0x0f 0x38 0x0e - invalid (vex only). */
571/* Opcode 0x0f 0x38 0x0f - invalid. */
572/* Opcode 0x66 0x0f 0x38 0x0f - invalid (vex only). */
573
574
575/* Opcode 0x0f 0x38 0x10 - invalid */
576
577
/** Body for the *blend* instructions (pblendvb, blendvps, blendvpd).
 *
 * The selector mask is the implicit XMM0 register (referenced via
 * IEM_MC_REF_XREG_U128_CONST with index 0).  SSE4.1 cpuid checks apply and the
 * 128-bit memory operand must be 16-byte aligned. */
#define IEMOP_BODY_P_BLEND_X(a_Instr) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register, register. \
         */ \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_BEGIN(3, 0); \
        IEM_MC_ARG(PRTUINT128U, puDst, 0); \
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1); \
        IEM_MC_ARG(PCRTUINT128U, puMask, 2); \
        IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT(); \
        IEM_MC_PREPARE_SSE_USAGE(); \
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_XREG_U128_CONST(puMask, 0); \
        IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fSse41, \
                                                             iemAImpl_ ## a_Instr ## _u128, \
                                                             iemAImpl_ ## a_Instr ## _u128_fallback), \
                                 puDst, puSrc, puMask); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * Register, memory. \
         */ \
        IEM_MC_BEGIN(3, 2); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
        IEM_MC_LOCAL(RTUINT128U, uSrc); \
        IEM_MC_ARG(PRTUINT128U, puDst, 0); \
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1); \
        IEM_MC_ARG(PCRTUINT128U, puMask, 2); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT(); \
        IEM_MC_PREPARE_SSE_USAGE(); \
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_XREG_U128_CONST(puMask, 0); \
        IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fSse41, \
                                                             iemAImpl_ ## a_Instr ## _u128, \
                                                             iemAImpl_ ## a_Instr ## _u128_fallback), \
                                 puDst, puSrc, puMask); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    (void)0
629
/** Opcode 0x66 0x0f 0x38 0x10 (legacy only). */
FNIEMOP_DEF(iemOp_pblendvb_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, PBLENDVB, pblendvb, Vdq, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES); /** @todo RM0 */
    /* Byte-granular blend with implicit XMM0 mask; see IEMOP_BODY_P_BLEND_X. */
    IEMOP_BODY_P_BLEND_X(pblendvb);
}
636
637
638/* Opcode 0x0f 0x38 0x11 - invalid */
639/* Opcode 0x66 0x0f 0x38 0x11 - invalid */
640/* Opcode 0x0f 0x38 0x12 - invalid */
641/* Opcode 0x66 0x0f 0x38 0x12 - invalid */
642/* Opcode 0x0f 0x38 0x13 - invalid */
643/* Opcode 0x66 0x0f 0x38 0x13 - invalid (vex only). */
644/* Opcode 0x0f 0x38 0x14 - invalid */
645
646
/** Opcode 0x66 0x0f 0x38 0x14 (legacy only). */
FNIEMOP_DEF(iemOp_blendvps_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, BLENDVPS, blendvps, Vdq, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES); /** @todo RM0 */
    /* Dword-granular blend with implicit XMM0 mask; see IEMOP_BODY_P_BLEND_X. */
    IEMOP_BODY_P_BLEND_X(blendvps);
}
653
654
655/* Opcode 0x0f 0x38 0x15 - invalid */
656
657
/** Opcode 0x66 0x0f 0x38 0x15 (legacy only). */
FNIEMOP_DEF(iemOp_blendvpd_Vdq_Wdq)
{
    IEMOP_MNEMONIC2(RM, BLENDVPD, blendvpd, Vdq, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES); /** @todo RM0 */
    /* Qword-granular blend with implicit XMM0 mask; see IEMOP_BODY_P_BLEND_X. */
    IEMOP_BODY_P_BLEND_X(blendvpd);
}
664
665
666/* Opcode 0x0f 0x38 0x16 - invalid */
667/* Opcode 0x66 0x0f 0x38 0x16 - invalid (vex only). */
668/* Opcode 0x0f 0x38 0x17 - invalid */
669
670
/** Opcode 0x66 0x0f 0x38 0x17.
 * @note The previous comment marked this opcode as invalid, but it is the
 *       SSE4.1 PTEST instruction implemented right here. */
FNIEMOP_DEF(iemOp_ptest_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PTEST, ptest, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc1, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc2, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
        /* PTEST only updates EFLAGS (ZF/CF per the SDM); both sources are
           read-only, no XMM register is modified. */
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_ptest_u128, puSrc1, puSrc2, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(PCRTUINT128U, puSrc1, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc2);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Decode, check exceptions, then do the alignment-checked fetch. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_ptest_u128, puSrc1, puSrc2, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
721
722
723/* Opcode 0x0f 0x38 0x18 - invalid */
724/* Opcode 0x66 0x0f 0x38 0x18 - invalid (vex only). */
725/* Opcode 0x0f 0x38 0x19 - invalid */
726/* Opcode 0x66 0x0f 0x38 0x19 - invalid (vex only). */
727/* Opcode 0x0f 0x38 0x1a - invalid */
728/* Opcode 0x66 0x0f 0x38 0x1a - invalid (vex only). */
729/* Opcode 0x0f 0x38 0x1b - invalid */
730/* Opcode 0x66 0x0f 0x38 0x1b - invalid */
731
732
/** Opcode 0x0f 0x38 0x1c. */
FNIEMOP_DEF(iemOp_pabsb_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PABSB, pabsb, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* MMX form: select native/fallback worker; last arg gates on guest SSSE3. */
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pabsb_u64, &iemAImpl_pabsb_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
741
742
/** Opcode 0x66 0x0f 0x38 0x1c. */
FNIEMOP_DEF(iemOp_pabsb_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PABSB, pabsb, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* SSE form: defer to the common SSSE3 worker. */
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pabsb_u128, iemAImpl_pabsb_u128_fallback));
}
751
752
/** Opcode 0x0f 0x38 0x1d. */
FNIEMOP_DEF(iemOp_pabsw_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PABSW, pabsw, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* MMX form: select native/fallback worker; last arg gates on guest SSSE3. */
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pabsw_u64, &iemAImpl_pabsw_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
761
762
/** Opcode 0x66 0x0f 0x38 0x1d. */
FNIEMOP_DEF(iemOp_pabsw_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PABSW, pabsw, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* SSE form: defer to the common SSSE3 worker. */
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pabsw_u128, iemAImpl_pabsw_u128_fallback));
}
771
772
/** Opcode 0x0f 0x38 0x1e. */
FNIEMOP_DEF(iemOp_pabsd_Pq_Qq)
{
    IEMOP_MNEMONIC2(RM, PABSD, pabsd, Pq, Qq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* MMX form: select native/fallback worker; last arg gates on guest SSSE3. */
    return FNIEMOP_CALL_2(iemOpCommonMmx_FullFull_To_Full_Ex,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pabsd_u64, &iemAImpl_pabsd_u64_fallback),
                          IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSsse3);
}
781
782
/** Opcode 0x66 0x0f 0x38 0x1e. */
FNIEMOP_DEF(iemOp_pabsd_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PABSD, pabsd, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* SSE form: defer to the common SSSE3 worker. */
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_pabsd_u128, iemAImpl_pabsd_u128_fallback));
}
791
792
793/* Opcode 0x0f 0x38 0x1f - invalid */
794/* Opcode 0x66 0x0f 0x38 0x1f - invalid */
795
796
/** Body for the pmov{s,z}x* instructions.
 *
 * @param a_Instr     Instruction mnemonic without prefix (e.g. pmovsxbw).
 * @param a_SrcWidth  Width in bits of the memory source operand (16/32/64).
 *
 * In the register case the whole low quadword of the source XMM register is
 * fetched regardless of @a a_SrcWidth; presumably the worker only consumes
 * the low bits it needs — TODO confirm (see the \@todo remarks at the
 * callers).  The memory form fetches exactly @a a_SrcWidth bits and imposes
 * no alignment requirement (plain IEM_MC_FETCH_MEM_U*, no ALIGN_SSE).
 * Note that the C fallback selected here is the 'v'-prefixed (VEX) worker. */
#define IEMOP_BODY_PMOV_S_Z(a_Instr, a_SrcWidth) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register, register. \
         */ \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_BEGIN(2, 0); \
        IEM_MC_ARG(PRTUINT128U, puDst, 0); \
        IEM_MC_ARG(uint64_t, uSrc, 1); \
        IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT(); \
        IEM_MC_PREPARE_SSE_USAGE(); \
        IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword */); \
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse41, \
                                                             iemAImpl_ ## a_Instr ## _u128, \
                                                             iemAImpl_v ## a_Instr ## _u128_fallback), \
                                 puDst, uSrc); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * Register, memory. \
         */ \
        IEM_MC_BEGIN(2, 2); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
        IEM_MC_ARG(PRTUINT128U, puDst, 0); \
        IEM_MC_ARG(uint ## a_SrcWidth ## _t, uSrc, 1); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT(); \
        IEM_MC_PREPARE_SSE_USAGE(); \
        IEM_MC_FETCH_MEM_U## a_SrcWidth (uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse41, \
                                                             iemAImpl_ ## a_Instr ## _u128, \
                                                             iemAImpl_v ## a_Instr ## _u128_fallback), \
                                 puDst, uSrc); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    (void)0
843
844
/** Opcode 0x66 0x0f 0x38 0x20. */
FNIEMOP_DEF(iemOp_pmovsxbw_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVSXBW, pmovsxbw, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Sign-extends 8 bytes (64-bit source) to 8 words. */
    IEMOP_BODY_PMOV_S_Z(pmovsxbw, 64);
}
852
853
/** Opcode 0x66 0x0f 0x38 0x21. */
FNIEMOP_DEF(iemOp_pmovsxbd_Vx_UxMd)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVSXBD, pmovsxbd, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Sign-extends 4 bytes (32-bit source) to 4 dwords. */
    IEMOP_BODY_PMOV_S_Z(pmovsxbd, 32);
}
861
862
/** Opcode 0x66 0x0f 0x38 0x22. */
FNIEMOP_DEF(iemOp_pmovsxbq_Vx_UxMw)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVSXBQ, pmovsxbq, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Sign-extends 2 bytes (16-bit source) to 2 qwords. */
    IEMOP_BODY_PMOV_S_Z(pmovsxbq, 16);
}
870
871
/** Opcode 0x66 0x0f 0x38 0x23. */
FNIEMOP_DEF(iemOp_pmovsxwd_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVSXWD, pmovsxwd, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Sign-extends 4 words (64-bit source) to 4 dwords. */
    IEMOP_BODY_PMOV_S_Z(pmovsxwd, 64);
}
879
880
/** Opcode 0x66 0x0f 0x38 0x24. */
FNIEMOP_DEF(iemOp_pmovsxwq_Vx_UxMd)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVSXWQ, pmovsxwq, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Sign-extends 2 words (32-bit source) to 2 qwords. */
    IEMOP_BODY_PMOV_S_Z(pmovsxwq, 32);
}
888
889
/** Opcode 0x66 0x0f 0x38 0x25. */
FNIEMOP_DEF(iemOp_pmovsxdq_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVSXDQ, pmovsxdq, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* Sign-extends 2 dwords (64-bit source) to 2 qwords. */
    IEMOP_BODY_PMOV_S_Z(pmovsxdq, 64);
}
897
898
899/* Opcode 0x66 0x0f 0x38 0x26 - invalid */
900/* Opcode 0x66 0x0f 0x38 0x27 - invalid */
901
902
/** Opcode 0x66 0x0f 0x38 0x28. */
FNIEMOP_DEF(iemOp_pmuldq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PMULDQ, pmuldq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Uses the 'Opt' worker: the implementation takes operands only, no FXSAVE state. */
    return FNIEMOP_CALL_1(iemOpCommonSse41Opt_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pmuldq_u128, iemAImpl_pmuldq_u128_fallback));
}
910
911
/** Opcode 0x66 0x0f 0x38 0x29. */
FNIEMOP_DEF(iemOp_pcmpeqq_Vx_Wx)
{
    IEMOP_MNEMONIC2(RM, PCMPEQQ, pcmpeqq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Defer to the common SSE4.1 worker with native/fallback selection. */
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pcmpeqq_u128, iemAImpl_pcmpeqq_u128_fallback));
}
919
920
921/**
922 * @opcode 0x2a
923 * @opcodesub !11 mr/reg
924 * @oppfx 0x66
925 * @opcpuid sse4.1
926 * @opgroup og_sse41_cachect
927 * @opxcpttype 1
928 * @optest op1=-1 op2=2 -> op1=2
929 * @optest op1=0 op2=-42 -> op1=-42
930 */
FNIEMOP_DEF(iemOp_movntdqa_Vdq_Mdq)
{
    IEMOP_MNEMONIC2(RM_MEM, MOVNTDQA, movntdqa, Vdq_WO, Mdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_MEM_MODE(bRm))
    {
        /* Register, memory.  (The non-temporal hint is not modelled; this is a
           plain aligned 128-bit load into the destination XMM register.) */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        /* Effective address first (consumes displacement bytes), then the
           done-decoding / no-LOCK check — the standard ordering in this file. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT();
        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();

        /* Alignment-checked fetch: misaligned m128 raises #GP per exception type 1. */
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }

    /**
     * @opdone
     * @opmnemonic ud660f382areg
     * @opcode 0x2a
     * @opcodesub 11 mr/reg
     * @oppfx 0x66
     * @opunused immediate
     * @opcpuid sse
     * @optest ->
     */
    else
        return IEMOP_RAISE_INVALID_OPCODE();
}
967
968
969/** Opcode 0x66 0x0f 0x38 0x2b. */
FNIEMOP_DEF(iemOp_packusdw_Vx_Wx)
{
    /* PACKUSDW (SSE4.1): pack signed dwords to words with unsigned saturation.
       NOTE(review): unlike the sibling SSE4.1 wrappers this passes the helper
       directly, without IEM_SELECT_HOST_OR_FALLBACK — presumably only a single
       portable implementation exists for packusdw; confirm against IEMAllAImpl*. */
    IEMOP_MNEMONIC2(RM, PACKUSDW, packusdw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse41Opt_FullFull_To_Full, iemAImpl_packusdw_u128);
}
975
976
977/* Opcode 0x66 0x0f 0x38 0x2c - invalid (vex only). */
978/* Opcode 0x66 0x0f 0x38 0x2d - invalid (vex only). */
979/* Opcode 0x66 0x0f 0x38 0x2e - invalid (vex only). */
980/* Opcode 0x66 0x0f 0x38 0x2f - invalid (vex only). */
981
982/** Opcode 0x66 0x0f 0x38 0x30. */
FNIEMOP_DEF(iemOp_pmovzxbw_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVZXBW, pmovzxbw, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* 64 = bit width of the memory source operand (the 'Mq' in the name: eight
       bytes, zero-extended to eight words by the common PMOV body). */
    IEMOP_BODY_PMOV_S_Z(pmovzxbw, 64);
}
989
990
991/** Opcode 0x66 0x0f 0x38 0x31. */
FNIEMOP_DEF(iemOp_pmovzxbd_Vx_UxMd)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVZXBD, pmovzxbd, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* 32 = bit width of the memory source operand (the 'Md' in the name: four
       bytes, zero-extended to four dwords by the common PMOV body). */
    IEMOP_BODY_PMOV_S_Z(pmovzxbd, 32);
}
998
999
1000/** Opcode 0x66 0x0f 0x38 0x32. */
FNIEMOP_DEF(iemOp_pmovzxbq_Vx_UxMw)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVZXBQ, pmovzxbq, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* 16 = bit width of the memory source operand (the 'Mw' in the name: two
       bytes, zero-extended to two qwords by the common PMOV body). */
    IEMOP_BODY_PMOV_S_Z(pmovzxbq, 16);
}
1007
1008
1009/** Opcode 0x66 0x0f 0x38 0x33. */
FNIEMOP_DEF(iemOp_pmovzxwd_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVZXWD, pmovzxwd, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* 64 = bit width of the memory source operand (the 'Mq' in the name: four
       words, zero-extended to four dwords by the common PMOV body). */
    IEMOP_BODY_PMOV_S_Z(pmovzxwd, 64);
}
1016
1017
1018/** Opcode 0x66 0x0f 0x38 0x34. */
FNIEMOP_DEF(iemOp_pmovzxwq_Vx_UxMd)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVZXWQ, pmovzxwq, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* 32 = bit width of the memory source operand (the 'Md' in the name: two
       words, zero-extended to two qwords by the common PMOV body). */
    IEMOP_BODY_PMOV_S_Z(pmovzxwq, 32);
}
1025
1026
1027/** Opcode 0x66 0x0f 0x38 0x35. */
FNIEMOP_DEF(iemOp_pmovzxdq_Vx_UxMq)
{
    /** @todo r=aeichner Review code, the naming of this function and the parameter type specifiers. */
    IEMOP_MNEMONIC2(RM, PMOVZXDQ, pmovzxdq, Vx, Wq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    /* 64 = bit width of the memory source operand (the 'Mq' in the name: two
       dwords, zero-extended to two qwords by the common PMOV body). */
    IEMOP_BODY_PMOV_S_Z(pmovzxdq, 64);
}
1034
1035
1036/* Opcode 0x66 0x0f 0x38 0x36 - invalid (vex only). */
1037
1038
1039/** Opcode 0x66 0x0f 0x38 0x37. */
FNIEMOP_DEF(iemOp_pcmpgtq_Vx_Wx)
{
    /* PCMPGTQ is SSE4.2 (not 4.1 like its neighbours), hence the Sse42 worker
       and the fSse42-keyed helper selection. */
    IEMOP_MNEMONIC2(RM, PCMPGTQ, pcmpgtq, Vx, Wx, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse42_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_pcmpgtq_u128, iemAImpl_pcmpgtq_u128_fallback));
}
1046
1047
1048/** Opcode 0x66 0x0f 0x38 0x38. */
FNIEMOP_DEF(iemOp_pminsb_Vx_Wx)
{
    /* PMINSB (SSE4.1): per-byte signed minimum, xmm1 = min(xmm1, xmm2/m128). */
    IEMOP_MNEMONIC2(RM, PMINSB, pminsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pminsb_u128, iemAImpl_pminsb_u128_fallback));
}
1055
1056
1057/** Opcode 0x66 0x0f 0x38 0x39. */
FNIEMOP_DEF(iemOp_pminsd_Vx_Wx)
{
    /* PMINSD (SSE4.1): per-dword signed minimum, xmm1 = min(xmm1, xmm2/m128). */
    IEMOP_MNEMONIC2(RM, PMINSD, pminsd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pminsd_u128, iemAImpl_pminsd_u128_fallback));
}
1064
1065
1066/** Opcode 0x66 0x0f 0x38 0x3a. */
FNIEMOP_DEF(iemOp_pminuw_Vx_Wx)
{
    /* PMINUW (SSE4.1): per-word unsigned minimum, xmm1 = min(xmm1, xmm2/m128). */
    IEMOP_MNEMONIC2(RM, PMINUW, pminuw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pminuw_u128, iemAImpl_pminuw_u128_fallback));
}
1073
1074
1075/** Opcode 0x66 0x0f 0x38 0x3b. */
FNIEMOP_DEF(iemOp_pminud_Vx_Wx)
{
    /* PMINUD (SSE4.1): per-dword unsigned minimum, xmm1 = min(xmm1, xmm2/m128). */
    IEMOP_MNEMONIC2(RM, PMINUD, pminud, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pminud_u128, iemAImpl_pminud_u128_fallback));
}
1082
1083
1084/** Opcode 0x66 0x0f 0x38 0x3c. */
FNIEMOP_DEF(iemOp_pmaxsb_Vx_Wx)
{
    /* PMAXSB (SSE4.1): per-byte signed maximum, xmm1 = max(xmm1, xmm2/m128). */
    IEMOP_MNEMONIC2(RM, PMAXSB, pmaxsb, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pmaxsb_u128, iemAImpl_pmaxsb_u128_fallback));
}
1091
1092
1093/** Opcode 0x66 0x0f 0x38 0x3d. */
FNIEMOP_DEF(iemOp_pmaxsd_Vx_Wx)
{
    /* PMAXSD (SSE4.1): per-dword signed maximum, xmm1 = max(xmm1, xmm2/m128). */
    IEMOP_MNEMONIC2(RM, PMAXSD, pmaxsd, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pmaxsd_u128, iemAImpl_pmaxsd_u128_fallback));
}
1100
1101
1102/** Opcode 0x66 0x0f 0x38 0x3e. */
FNIEMOP_DEF(iemOp_pmaxuw_Vx_Wx)
{
    /* PMAXUW (SSE4.1): per-word unsigned maximum, xmm1 = max(xmm1, xmm2/m128). */
    IEMOP_MNEMONIC2(RM, PMAXUW, pmaxuw, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pmaxuw_u128, iemAImpl_pmaxuw_u128_fallback));
}
1109
1110
1111/** Opcode 0x66 0x0f 0x38 0x3f. */
FNIEMOP_DEF(iemOp_pmaxud_Vx_Wx)
{
    /* PMAXUD (SSE4.1): per-dword unsigned maximum, xmm1 = max(xmm1, xmm2/m128). */
    IEMOP_MNEMONIC2(RM, PMAXUD, pmaxud, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pmaxud_u128, iemAImpl_pmaxud_u128_fallback));
}
1118
1119
1120/** Opcode 0x66 0x0f 0x38 0x40. */
FNIEMOP_DEF(iemOp_pmulld_Vx_Wx)
{
    /* PMULLD (SSE4.1): per-dword multiply, keeping the low 32 bits of each product. */
    IEMOP_MNEMONIC2(RM, PMULLD, pmulld, Vx, Wx, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pmulld_u128, iemAImpl_pmulld_u128_fallback));
}
1127
1128
1129/** Opcode 0x66 0x0f 0x38 0x41. */
FNIEMOP_DEF(iemOp_phminposuw_Vdq_Wdq)
{
    /* PHMINPOSUW (SSE4.1): horizontal minimum of the eight unsigned words of
       the source; value and index land in the low lanes of the destination. */
    IEMOP_MNEMONIC2(RM, PHMINPOSUW, phminposuw, Vdq, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonSse41Opt_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_phminposuw_u128, iemAImpl_phminposuw_u128_fallback));
}
1136
1137
1138/* Opcode 0x66 0x0f 0x38 0x42 - invalid. */
1139/* Opcode 0x66 0x0f 0x38 0x43 - invalid. */
1140/* Opcode 0x66 0x0f 0x38 0x44 - invalid. */
1141/* Opcode 0x66 0x0f 0x38 0x45 - invalid (vex only). */
1142/* Opcode 0x66 0x0f 0x38 0x46 - invalid (vex only). */
1143/* Opcode 0x66 0x0f 0x38 0x47 - invalid (vex only). */
1144/* Opcode 0x66 0x0f 0x38 0x48 - invalid. */
1145/* Opcode 0x66 0x0f 0x38 0x49 - invalid. */
1146/* Opcode 0x66 0x0f 0x38 0x4a - invalid. */
1147/* Opcode 0x66 0x0f 0x38 0x4b - invalid. */
1148/* Opcode 0x66 0x0f 0x38 0x4c - invalid. */
1149/* Opcode 0x66 0x0f 0x38 0x4d - invalid. */
1150/* Opcode 0x66 0x0f 0x38 0x4e - invalid. */
1151/* Opcode 0x66 0x0f 0x38 0x4f - invalid. */
1152
1153/* Opcode 0x66 0x0f 0x38 0x50 - invalid. */
1154/* Opcode 0x66 0x0f 0x38 0x51 - invalid. */
1155/* Opcode 0x66 0x0f 0x38 0x52 - invalid. */
1156/* Opcode 0x66 0x0f 0x38 0x53 - invalid. */
1157/* Opcode 0x66 0x0f 0x38 0x54 - invalid. */
1158/* Opcode 0x66 0x0f 0x38 0x55 - invalid. */
1159/* Opcode 0x66 0x0f 0x38 0x56 - invalid. */
1160/* Opcode 0x66 0x0f 0x38 0x57 - invalid. */
1161/* Opcode 0x66 0x0f 0x38 0x58 - invalid (vex only). */
1162/* Opcode 0x66 0x0f 0x38 0x59 - invalid (vex only). */
1163/* Opcode 0x66 0x0f 0x38 0x5a - invalid (vex only). */
1164/* Opcode 0x66 0x0f 0x38 0x5b - invalid. */
1165/* Opcode 0x66 0x0f 0x38 0x5c - invalid. */
1166/* Opcode 0x66 0x0f 0x38 0x5d - invalid. */
1167/* Opcode 0x66 0x0f 0x38 0x5e - invalid. */
1168/* Opcode 0x66 0x0f 0x38 0x5f - invalid. */
1169
1170/* Opcode 0x66 0x0f 0x38 0x60 - invalid. */
1171/* Opcode 0x66 0x0f 0x38 0x61 - invalid. */
1172/* Opcode 0x66 0x0f 0x38 0x62 - invalid. */
1173/* Opcode 0x66 0x0f 0x38 0x63 - invalid. */
1174/* Opcode 0x66 0x0f 0x38 0x64 - invalid. */
1175/* Opcode 0x66 0x0f 0x38 0x65 - invalid. */
1176/* Opcode 0x66 0x0f 0x38 0x66 - invalid. */
1177/* Opcode 0x66 0x0f 0x38 0x67 - invalid. */
1178/* Opcode 0x66 0x0f 0x38 0x68 - invalid. */
1179/* Opcode 0x66 0x0f 0x38 0x69 - invalid. */
1180/* Opcode 0x66 0x0f 0x38 0x6a - invalid. */
1181/* Opcode 0x66 0x0f 0x38 0x6b - invalid. */
1182/* Opcode 0x66 0x0f 0x38 0x6c - invalid. */
1183/* Opcode 0x66 0x0f 0x38 0x6d - invalid. */
1184/* Opcode 0x66 0x0f 0x38 0x6e - invalid. */
1185/* Opcode 0x66 0x0f 0x38 0x6f - invalid. */
1186
1187/* Opcode 0x66 0x0f 0x38 0x70 - invalid. */
1188/* Opcode 0x66 0x0f 0x38 0x71 - invalid. */
1189/* Opcode 0x66 0x0f 0x38 0x72 - invalid. */
1190/* Opcode 0x66 0x0f 0x38 0x73 - invalid. */
1191/* Opcode 0x66 0x0f 0x38 0x74 - invalid. */
1192/* Opcode 0x66 0x0f 0x38 0x75 - invalid. */
1193/* Opcode 0x66 0x0f 0x38 0x76 - invalid. */
1194/* Opcode 0x66 0x0f 0x38 0x77 - invalid. */
1195/* Opcode 0x66 0x0f 0x38 0x78 - invalid (vex only). */
1196/* Opcode 0x66 0x0f 0x38 0x79 - invalid (vex only). */
1197/* Opcode 0x66 0x0f 0x38 0x7a - invalid. */
1198/* Opcode 0x66 0x0f 0x38 0x7b - invalid. */
1199/* Opcode 0x66 0x0f 0x38 0x7c - invalid. */
1200/* Opcode 0x66 0x0f 0x38 0x7d - invalid. */
1201/* Opcode 0x66 0x0f 0x38 0x7e - invalid. */
1202/* Opcode 0x66 0x0f 0x38 0x7f - invalid. */
1203
1204/** Opcode 0x66 0x0f 0x38 0x80. */
1205#ifdef VBOX_WITH_NESTED_HWVIRT_VMX_EPT
1206FNIEMOP_DEF(iemOp_invept_Gy_Mdq)
1207{
1208 IEMOP_MNEMONIC(invept, "invept Gy,Mdq");
1209 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1210 IEMOP_HLP_IN_VMX_OPERATION("invept", kVmxVDiag_Invept);
1211 IEMOP_HLP_VMX_INSTR("invept", kVmxVDiag_Invept);
1212 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1213 if (IEM_IS_MODRM_MEM_MODE(bRm))
1214 {
1215 /* Register, memory. */
1216 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
1217 {
1218 IEM_MC_BEGIN(3, 0);
1219 IEM_MC_ARG(uint8_t, iEffSeg, 0);
1220 IEM_MC_ARG(RTGCPTR, GCPtrInveptDesc, 1);
1221 IEM_MC_ARG(uint64_t, uInveptType, 2);
1222 IEM_MC_FETCH_GREG_U64(uInveptType, IEM_GET_MODRM_REG(pVCpu, bRm));
1223 IEM_MC_CALC_RM_EFF_ADDR(GCPtrInveptDesc, bRm, 0);
1224 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
1225 IEM_MC_CALL_CIMPL_3(iemCImpl_invept, iEffSeg, GCPtrInveptDesc, uInveptType);
1226 IEM_MC_END();
1227 }
1228 else
1229 {
1230 IEM_MC_BEGIN(3, 0);
1231 IEM_MC_ARG(uint8_t, iEffSeg, 0);
1232 IEM_MC_ARG(RTGCPTR, GCPtrInveptDesc, 1);
1233 IEM_MC_ARG(uint32_t, uInveptType, 2);
1234 IEM_MC_FETCH_GREG_U32(uInveptType, IEM_GET_MODRM_REG(pVCpu, bRm));
1235 IEM_MC_CALC_RM_EFF_ADDR(GCPtrInveptDesc, bRm, 0);
1236 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
1237 IEM_MC_CALL_CIMPL_3(iemCImpl_invept, iEffSeg, GCPtrInveptDesc, uInveptType);
1238 IEM_MC_END();
1239 }
1240 }
1241 Log(("iemOp_invept_Gy_Mdq: invalid encoding -> #UD\n"));
1242 return IEMOP_RAISE_INVALID_OPCODE();
1243}
1244#else
1245FNIEMOP_STUB(iemOp_invept_Gy_Mdq);
1246#endif
1247
1248/** Opcode 0x66 0x0f 0x38 0x81. */
1249#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
1250FNIEMOP_DEF(iemOp_invvpid_Gy_Mdq)
1251{
1252 IEMOP_MNEMONIC(invvpid, "invvpid Gy,Mdq");
1253 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1254 IEMOP_HLP_IN_VMX_OPERATION("invvpid", kVmxVDiag_Invvpid);
1255 IEMOP_HLP_VMX_INSTR("invvpid", kVmxVDiag_Invvpid);
1256 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1257 if (IEM_IS_MODRM_MEM_MODE(bRm))
1258 {
1259 /* Register, memory. */
1260 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
1261 {
1262 IEM_MC_BEGIN(3, 0);
1263 IEM_MC_ARG(uint8_t, iEffSeg, 0);
1264 IEM_MC_ARG(RTGCPTR, GCPtrInvvpidDesc, 1);
1265 IEM_MC_ARG(uint64_t, uInvvpidType, 2);
1266 IEM_MC_FETCH_GREG_U64(uInvvpidType, IEM_GET_MODRM_REG(pVCpu, bRm));
1267 IEM_MC_CALC_RM_EFF_ADDR(GCPtrInvvpidDesc, bRm, 0);
1268 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
1269 IEM_MC_CALL_CIMPL_3(iemCImpl_invvpid, iEffSeg, GCPtrInvvpidDesc, uInvvpidType);
1270 IEM_MC_END();
1271 }
1272 else
1273 {
1274 IEM_MC_BEGIN(3, 0);
1275 IEM_MC_ARG(uint8_t, iEffSeg, 0);
1276 IEM_MC_ARG(RTGCPTR, GCPtrInvvpidDesc, 1);
1277 IEM_MC_ARG(uint32_t, uInvvpidType, 2);
1278 IEM_MC_FETCH_GREG_U32(uInvvpidType, IEM_GET_MODRM_REG(pVCpu, bRm));
1279 IEM_MC_CALC_RM_EFF_ADDR(GCPtrInvvpidDesc, bRm, 0);
1280 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
1281 IEM_MC_CALL_CIMPL_3(iemCImpl_invvpid, iEffSeg, GCPtrInvvpidDesc, uInvvpidType);
1282 IEM_MC_END();
1283 }
1284 }
1285 Log(("iemOp_invvpid_Gy_Mdq: invalid encoding -> #UD\n"));
1286 return IEMOP_RAISE_INVALID_OPCODE();
1287}
1288#else
1289FNIEMOP_STUB(iemOp_invvpid_Gy_Mdq);
1290#endif
1291
1292/** Opcode 0x66 0x0f 0x38 0x82. */
1293FNIEMOP_DEF(iemOp_invpcid_Gy_Mdq)
1294{
1295 IEMOP_MNEMONIC(invpcid, "invpcid Gy,Mdq");
1296 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1297 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1298 if (IEM_IS_MODRM_MEM_MODE(bRm))
1299 {
1300 /* Register, memory. */
1301 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT)
1302 {
1303 IEM_MC_BEGIN(3, 0);
1304 IEM_MC_ARG(uint8_t, iEffSeg, 0);
1305 IEM_MC_ARG(RTGCPTR, GCPtrInvpcidDesc, 1);
1306 IEM_MC_ARG(uint64_t, uInvpcidType, 2);
1307 IEM_MC_FETCH_GREG_U64(uInvpcidType, IEM_GET_MODRM_REG(pVCpu, bRm));
1308 IEM_MC_CALC_RM_EFF_ADDR(GCPtrInvpcidDesc, bRm, 0);
1309 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
1310 IEM_MC_CALL_CIMPL_3(iemCImpl_invpcid, iEffSeg, GCPtrInvpcidDesc, uInvpcidType);
1311 IEM_MC_END();
1312 }
1313 else
1314 {
1315 IEM_MC_BEGIN(3, 0);
1316 IEM_MC_ARG(uint8_t, iEffSeg, 0);
1317 IEM_MC_ARG(RTGCPTR, GCPtrInvpcidDesc, 1);
1318 IEM_MC_ARG(uint32_t, uInvpcidType, 2);
1319 IEM_MC_FETCH_GREG_U32(uInvpcidType, IEM_GET_MODRM_REG(pVCpu, bRm));
1320 IEM_MC_CALC_RM_EFF_ADDR(GCPtrInvpcidDesc, bRm, 0);
1321 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
1322 IEM_MC_CALL_CIMPL_3(iemCImpl_invpcid, iEffSeg, GCPtrInvpcidDesc, uInvpcidType);
1323 IEM_MC_END();
1324 }
1325 }
1326 Log(("iemOp_invpcid_Gy_Mdq: invalid encoding -> #UD\n"));
1327 return IEMOP_RAISE_INVALID_OPCODE();
1328}
1329
1330
1331/* Opcode 0x66 0x0f 0x38 0x83 - invalid. */
1332/* Opcode 0x66 0x0f 0x38 0x84 - invalid. */
1333/* Opcode 0x66 0x0f 0x38 0x85 - invalid. */
1334/* Opcode 0x66 0x0f 0x38 0x86 - invalid. */
1335/* Opcode 0x66 0x0f 0x38 0x87 - invalid. */
1336/* Opcode 0x66 0x0f 0x38 0x88 - invalid. */
1337/* Opcode 0x66 0x0f 0x38 0x89 - invalid. */
1338/* Opcode 0x66 0x0f 0x38 0x8a - invalid. */
1339/* Opcode 0x66 0x0f 0x38 0x8b - invalid. */
1340/* Opcode 0x66 0x0f 0x38 0x8c - invalid (vex only). */
1341/* Opcode 0x66 0x0f 0x38 0x8d - invalid. */
1342/* Opcode 0x66 0x0f 0x38 0x8e - invalid (vex only). */
1343/* Opcode 0x66 0x0f 0x38 0x8f - invalid. */
1344
1345/* Opcode 0x66 0x0f 0x38 0x90 - invalid (vex only). */
1346/* Opcode 0x66 0x0f 0x38 0x91 - invalid (vex only). */
1347/* Opcode 0x66 0x0f 0x38 0x92 - invalid (vex only). */
1348/* Opcode 0x66 0x0f 0x38 0x93 - invalid (vex only). */
1349/* Opcode 0x66 0x0f 0x38 0x94 - invalid. */
1350/* Opcode 0x66 0x0f 0x38 0x95 - invalid. */
1351/* Opcode 0x66 0x0f 0x38 0x96 - invalid (vex only). */
1352/* Opcode 0x66 0x0f 0x38 0x97 - invalid (vex only). */
1353/* Opcode 0x66 0x0f 0x38 0x98 - invalid (vex only). */
1354/* Opcode 0x66 0x0f 0x38 0x99 - invalid (vex only). */
1355/* Opcode 0x66 0x0f 0x38 0x9a - invalid (vex only). */
1356/* Opcode 0x66 0x0f 0x38 0x9b - invalid (vex only). */
1357/* Opcode 0x66 0x0f 0x38 0x9c - invalid (vex only). */
1358/* Opcode 0x66 0x0f 0x38 0x9d - invalid (vex only). */
1359/* Opcode 0x66 0x0f 0x38 0x9e - invalid (vex only). */
1360/* Opcode 0x66 0x0f 0x38 0x9f - invalid (vex only). */
1361
1362/* Opcode 0x66 0x0f 0x38 0xa0 - invalid. */
1363/* Opcode 0x66 0x0f 0x38 0xa1 - invalid. */
1364/* Opcode 0x66 0x0f 0x38 0xa2 - invalid. */
1365/* Opcode 0x66 0x0f 0x38 0xa3 - invalid. */
1366/* Opcode 0x66 0x0f 0x38 0xa4 - invalid. */
1367/* Opcode 0x66 0x0f 0x38 0xa5 - invalid. */
1368/* Opcode 0x66 0x0f 0x38 0xa6 - invalid (vex only). */
1369/* Opcode 0x66 0x0f 0x38 0xa7 - invalid (vex only). */
1370/* Opcode 0x66 0x0f 0x38 0xa8 - invalid (vex only). */
1371/* Opcode 0x66 0x0f 0x38 0xa9 - invalid (vex only). */
1372/* Opcode 0x66 0x0f 0x38 0xaa - invalid (vex only). */
1373/* Opcode 0x66 0x0f 0x38 0xab - invalid (vex only). */
1374/* Opcode 0x66 0x0f 0x38 0xac - invalid (vex only). */
1375/* Opcode 0x66 0x0f 0x38 0xad - invalid (vex only). */
1376/* Opcode 0x66 0x0f 0x38 0xae - invalid (vex only). */
1377/* Opcode 0x66 0x0f 0x38 0xaf - invalid (vex only). */
1378
1379/* Opcode 0x66 0x0f 0x38 0xb0 - invalid. */
1380/* Opcode 0x66 0x0f 0x38 0xb1 - invalid. */
1381/* Opcode 0x66 0x0f 0x38 0xb2 - invalid. */
1382/* Opcode 0x66 0x0f 0x38 0xb3 - invalid. */
1383/* Opcode 0x66 0x0f 0x38 0xb4 - invalid. */
1384/* Opcode 0x66 0x0f 0x38 0xb5 - invalid. */
1385/* Opcode 0x66 0x0f 0x38 0xb6 - invalid (vex only). */
1386/* Opcode 0x66 0x0f 0x38 0xb7 - invalid (vex only). */
1387/* Opcode 0x66 0x0f 0x38 0xb8 - invalid (vex only). */
1388/* Opcode 0x66 0x0f 0x38 0xb9 - invalid (vex only). */
1389/* Opcode 0x66 0x0f 0x38 0xba - invalid (vex only). */
1390/* Opcode 0x66 0x0f 0x38 0xbb - invalid (vex only). */
1391/* Opcode 0x66 0x0f 0x38 0xbc - invalid (vex only). */
1392/* Opcode 0x66 0x0f 0x38 0xbd - invalid (vex only). */
1393/* Opcode 0x66 0x0f 0x38 0xbe - invalid (vex only). */
1394/* Opcode 0x66 0x0f 0x38 0xbf - invalid (vex only). */
1395
1396/* Opcode 0x0f 0x38 0xc0 - invalid. */
1397/* Opcode 0x66 0x0f 0x38 0xc0 - invalid. */
1398/* Opcode 0x0f 0x38 0xc1 - invalid. */
1399/* Opcode 0x66 0x0f 0x38 0xc1 - invalid. */
1400/* Opcode 0x0f 0x38 0xc2 - invalid. */
1401/* Opcode 0x66 0x0f 0x38 0xc2 - invalid. */
1402/* Opcode 0x0f 0x38 0xc3 - invalid. */
1403/* Opcode 0x66 0x0f 0x38 0xc3 - invalid. */
1404/* Opcode 0x0f 0x38 0xc4 - invalid. */
1405/* Opcode 0x66 0x0f 0x38 0xc4 - invalid. */
1406/* Opcode 0x0f 0x38 0xc5 - invalid. */
1407/* Opcode 0x66 0x0f 0x38 0xc5 - invalid. */
1408/* Opcode 0x0f 0x38 0xc6 - invalid. */
1409/* Opcode 0x66 0x0f 0x38 0xc6 - invalid. */
1410/* Opcode 0x0f 0x38 0xc7 - invalid. */
1411/* Opcode 0x66 0x0f 0x38 0xc7 - invalid. */
1412/** Opcode 0x0f 0x38 0xc8. */
1413FNIEMOP_STUB(iemOp_sha1nexte_Vdq_Wdq);
1414/* Opcode 0x66 0x0f 0x38 0xc8 - invalid. */
1415/** Opcode 0x0f 0x38 0xc9. */
1416FNIEMOP_STUB(iemOp_sha1msg1_Vdq_Wdq);
1417/* Opcode 0x66 0x0f 0x38 0xc9 - invalid. */
1418/** Opcode 0x0f 0x38 0xca. */
1419FNIEMOP_STUB(iemOp_sha1msg2_Vdq_Wdq);
1420/* Opcode 0x66 0x0f 0x38 0xca - invalid. */
1421/** Opcode 0x0f 0x38 0xcb. */
1422FNIEMOP_STUB(iemOp_sha256rnds2_Vdq_Wdq);
1423/* Opcode 0x66 0x0f 0x38 0xcb - invalid. */
1424/** Opcode 0x0f 0x38 0xcc. */
1425FNIEMOP_STUB(iemOp_sha256msg1_Vdq_Wdq);
1426/* Opcode 0x66 0x0f 0x38 0xcc - invalid. */
1427/** Opcode 0x0f 0x38 0xcd. */
1428FNIEMOP_STUB(iemOp_sha256msg2_Vdq_Wdq);
1429/* Opcode 0x66 0x0f 0x38 0xcd - invalid. */
1430/* Opcode 0x0f 0x38 0xce - invalid. */
1431/* Opcode 0x66 0x0f 0x38 0xce - invalid. */
1432/* Opcode 0x0f 0x38 0xcf - invalid. */
1433/* Opcode 0x66 0x0f 0x38 0xcf - invalid. */
1434
1435/* Opcode 0x66 0x0f 0x38 0xd0 - invalid. */
1436/* Opcode 0x66 0x0f 0x38 0xd1 - invalid. */
1437/* Opcode 0x66 0x0f 0x38 0xd2 - invalid. */
1438/* Opcode 0x66 0x0f 0x38 0xd3 - invalid. */
1439/* Opcode 0x66 0x0f 0x38 0xd4 - invalid. */
1440/* Opcode 0x66 0x0f 0x38 0xd5 - invalid. */
1441/* Opcode 0x66 0x0f 0x38 0xd6 - invalid. */
1442/* Opcode 0x66 0x0f 0x38 0xd7 - invalid. */
1443/* Opcode 0x66 0x0f 0x38 0xd8 - invalid. */
1444/* Opcode 0x66 0x0f 0x38 0xd9 - invalid. */
1445/* Opcode 0x66 0x0f 0x38 0xda - invalid. */
1446
1447
1448/** Opcode 0x66 0x0f 0x38 0xdb. */
FNIEMOP_DEF(iemOp_aesimc_Vdq_Wdq)
{
    /* AESIMC (AES-NI): InvMixColumns transform of the source round key.
       Native helper if the host has AES-NI, else the portable C fallback. */
    IEMOP_MNEMONIC2(RM, AESIMC, aesimc, Vdq, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonAesNi_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fAesNi, iemAImpl_aesimc_u128, iemAImpl_aesimc_u128_fallback));
}
1455
1456
1457/** Opcode 0x66 0x0f 0x38 0xdc. */
FNIEMOP_DEF(iemOp_aesenc_Vdq_Wdq)
{
    /* AESENC (AES-NI): one AES encryption round using the source round key. */
    IEMOP_MNEMONIC2(RM, AESENC, aesenc, Vdq, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonAesNi_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fAesNi, iemAImpl_aesenc_u128, iemAImpl_aesenc_u128_fallback));
}
1464
1465
1466/** Opcode 0x66 0x0f 0x38 0xdd. */
FNIEMOP_DEF(iemOp_aesenclast_Vdq_Wdq)
{
    /* AESENCLAST (AES-NI): the final AES encryption round (no MixColumns). */
    IEMOP_MNEMONIC2(RM, AESENCLAST, aesenclast, Vdq, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonAesNi_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fAesNi, iemAImpl_aesenclast_u128, iemAImpl_aesenclast_u128_fallback));
}
1473
1474
1475/** Opcode 0x66 0x0f 0x38 0xde. */
FNIEMOP_DEF(iemOp_aesdec_Vdq_Wdq)
{
    /* AESDEC (AES-NI): one AES decryption round using the source round key. */
    IEMOP_MNEMONIC2(RM, AESDEC, aesdec, Vdq, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonAesNi_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fAesNi, iemAImpl_aesdec_u128, iemAImpl_aesdec_u128_fallback));
}
1482
1483
1484/** Opcode 0x66 0x0f 0x38 0xdf. */
FNIEMOP_DEF(iemOp_aesdeclast_Vdq_Wdq)
{
    /* AESDECLAST (AES-NI): the final AES decryption round (no InvMixColumns). */
    IEMOP_MNEMONIC2(RM, AESDECLAST, aesdeclast, Vdq, Wdq, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpCommonAesNi_FullFull_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fAesNi, iemAImpl_aesdeclast_u128, iemAImpl_aesdeclast_u128_fallback));
}
1491
1492
1493/* Opcode 0x66 0x0f 0x38 0xe0 - invalid. */
1494/* Opcode 0x66 0x0f 0x38 0xe1 - invalid. */
1495/* Opcode 0x66 0x0f 0x38 0xe2 - invalid. */
1496/* Opcode 0x66 0x0f 0x38 0xe3 - invalid. */
1497/* Opcode 0x66 0x0f 0x38 0xe4 - invalid. */
1498/* Opcode 0x66 0x0f 0x38 0xe5 - invalid. */
1499/* Opcode 0x66 0x0f 0x38 0xe6 - invalid. */
1500/* Opcode 0x66 0x0f 0x38 0xe7 - invalid. */
1501/* Opcode 0x66 0x0f 0x38 0xe8 - invalid. */
1502/* Opcode 0x66 0x0f 0x38 0xe9 - invalid. */
1503/* Opcode 0x66 0x0f 0x38 0xea - invalid. */
1504/* Opcode 0x66 0x0f 0x38 0xeb - invalid. */
1505/* Opcode 0x66 0x0f 0x38 0xec - invalid. */
1506/* Opcode 0x66 0x0f 0x38 0xed - invalid. */
1507/* Opcode 0x66 0x0f 0x38 0xee - invalid. */
1508/* Opcode 0x66 0x0f 0x38 0xef - invalid. */
1509
1510
1511/** Opcode [0x66] 0x0f 0x38 0xf0. */
FNIEMOP_DEF(iemOp_movbe_Gv_Mv)
{
    /* MOVBE r16/32/64, m16/32/64: load from memory with byte order reversed.
       Only the memory source form is valid; reg,reg is #UD. */
    IEMOP_MNEMONIC2(RM, MOVBE, movbe, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovBe)
        return iemOp_InvalidNeedRM(pVCpu); /* #UD when the guest CPU profile lacks MOVBE. */

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (!IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, uSrc);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

                /* Byte-swap the loaded value before writing the destination GPR. */
                IEM_MC_BSWAP_LOCAL_U16(uSrc);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, uSrc);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

                IEM_MC_BSWAP_LOCAL_U32(uSrc);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

                IEM_MC_BSWAP_LOCAL_U64(uSrc);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Reg/reg not supported. */
        return IEMOP_RAISE_INVALID_OPCODE();
    }
}
1583
1584
1585/* Opcode 0xf3 0x0f 0x38 0xf0 - invalid. */
1586
1587
1588/** Opcode 0xf2 0x0f 0x38 0xf0. */
FNIEMOP_DEF(iemOp_crc32_Gd_Eb)
{
    /* CRC32 r32, r/m8 (SSE4.2): accumulates the CRC of the 8-bit source into
       the 32-bit destination register. */
    IEMOP_MNEMONIC2(RM, CRC32, crc32, Gd, Eb, DISOPTYPE_HARMLESS, 0);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse42)
        return iemOp_InvalidNeedRM(pVCpu); /* #UD when the guest CPU profile lacks SSE4.2. */

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint32_t *, puDst, 0);
        IEM_MC_ARG(uint8_t, uSrc, 1);
        IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u8, iemAImpl_crc32_u8_fallback), puDst, uSrc);
        /* Writing a 32-bit GPR through a reference: clear bits 63:32 explicitly. */
        IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG(uint32_t *, puDst, 0);
        IEM_MC_ARG(uint8_t, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u8, iemAImpl_crc32_u8_fallback), puDst, uSrc);
        IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
1634
1635
1636/** Opcode [0x66] 0x0f 0x38 0xf1. */
FNIEMOP_DEF(iemOp_movbe_Mv_Gv)
{
    /* MOVBE m16/32/64, r16/32/64: store to memory with byte order reversed.
       Only the memory destination form is valid; reg,reg is #UD. */
    IEMOP_MNEMONIC2(MR, MOVBE, movbe, Ev, Gv, DISOPTYPE_HARMLESS, 0);
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovBe)
        return iemOp_InvalidNeedRM(pVCpu); /* #UD when the guest CPU profile lacks MOVBE. */

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (!IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Memory, register.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                /* Byte-swap the register value before the memory store. */
                IEM_MC_BSWAP_LOCAL_U16(u16Value);
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_BSWAP_LOCAL_U32(u32Value);
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_BSWAP_LOCAL_U64(u64Value);
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Reg/reg not supported. */
        return IEMOP_RAISE_INVALID_OPCODE();
    }
}
1699
1700
1701/* Opcode 0xf3 0x0f 0x38 0xf1 - invalid. */
1702
1703
1704/** Opcode 0xf2 0x0f 0x38 0xf1. */
/**
 * Opcode 0xf2 0x0f 0x38 0xf1 - CRC32 Gv, Ev.
 *
 * Accumulates the SSE4.2 CRC32 checksum of a 16/32/64-bit source operand
 * (register or memory, selected by the effective operand size) into the
 * destination register.  The destination is always referenced as a 32-bit
 * GREG - even with a 64-bit source - since the checksum lives in the low
 * 32 bits; the high qword is explicitly cleared afterwards.
 */
FNIEMOP_DEF(iemOp_crc32_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, CRC32, crc32, Gd, Ev, DISOPTYPE_HARMLESS, 0);
    /* Decode as an invalid opcode (still consuming the ModR/M byte) when the
       guest CPU profile lacks SSE4.2. */
    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse42)
        return iemOp_InvalidNeedRM(pVCpu);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK prefix is not allowed. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint32_t *,          puDst, 0);
                IEM_MC_ARG(uint16_t,            uSrc, 1);
                IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
                /* Native helper when the host has SSE4.2, C fallback otherwise. */
                IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u16, iemAImpl_crc32_u16_fallback),
                                         puDst, uSrc);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst); /* zero bits 63:32 of the destination */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint32_t *,          puDst, 0);
                IEM_MC_ARG(uint32_t,            uSrc, 1);
                IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u32, iemAImpl_crc32_u32_fallback),
                                         puDst, uSrc);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint32_t *,          puDst, 0); /* dest stays 32-bit even for REX.W */
                IEM_MC_ARG(uint64_t,            uSrc, 1);
                IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u64, iemAImpl_crc32_u64_fallback),
                                         puDst, uSrc);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * Register, memory.
         *
         * The effective address must be calculated before decoding is
         * declared done; the source is fetched from memory afterwards.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 1);
                IEM_MC_ARG(uint32_t *,          puDst, 0);
                IEM_MC_ARG(uint16_t,            uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR,           GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK prefix is not allowed. */
                IEM_MC_FETCH_MEM_U16(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u16, iemAImpl_crc32_u16_fallback),
                                         puDst, uSrc);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst); /* zero bits 63:32 of the destination */

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 1);
                IEM_MC_ARG(uint32_t *,          puDst, 0);
                IEM_MC_ARG(uint32_t,            uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR,           GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u32, iemAImpl_crc32_u32_fallback),
                                         puDst, uSrc);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 1);
                IEM_MC_ARG(uint32_t *,          puDst, 0); /* dest stays 32-bit even for REX.W */
                IEM_MC_ARG(uint64_t,            uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR,           GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u64, iemAImpl_crc32_u64_fallback),
                                         puDst, uSrc);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1830
1831
1832/* Opcode 0x0f 0x38 0xf2 - invalid (vex only). */
1833/* Opcode 0x66 0x0f 0x38 0xf2 - invalid. */
1834/* Opcode 0xf3 0x0f 0x38 0xf2 - invalid. */
1835/* Opcode 0xf2 0x0f 0x38 0xf2 - invalid. */
1836
1837/* Opcode 0x0f 0x38 0xf3 - invalid (vex only - group 17). */
1838/* Opcode 0x66 0x0f 0x38 0xf3 - invalid (vex only - group 17). */
1839/* Opcode 0xf3 0x0f 0x38 0xf3 - invalid (vex only - group 17). */
1840/* Opcode 0xf2 0x0f 0x38 0xf3 - invalid (vex only - group 17). */
1841
1842/* Opcode 0x0f 0x38 0xf4 - invalid. */
1843/* Opcode 0x66 0x0f 0x38 0xf4 - invalid. */
1844/* Opcode 0xf3 0x0f 0x38 0xf4 - invalid. */
1845/* Opcode 0xf2 0x0f 0x38 0xf4 - invalid. */
1846
1847/* Opcode 0x0f 0x38 0xf5 - invalid (vex only). */
1848/* Opcode 0x66 0x0f 0x38 0xf5 - invalid. */
1849/* Opcode 0xf3 0x0f 0x38 0xf5 - invalid (vex only). */
1850/* Opcode 0xf2 0x0f 0x38 0xf5 - invalid (vex only). */
1851
1852/* Opcode 0x0f 0x38 0xf6 - invalid. */
1853/** Opcode 0x66 0x0f 0x38 0xf6. */
1854FNIEMOP_STUB(iemOp_adcx_Gy_Ey);
1855/** Opcode 0xf3 0x0f 0x38 0xf6. */
1856FNIEMOP_STUB(iemOp_adox_Gy_Ey);
1857/* Opcode 0xf2 0x0f 0x38 0xf6 - invalid (vex only). */
1858
1859/* Opcode 0x0f 0x38 0xf7 - invalid (vex only). */
1860/* Opcode 0x66 0x0f 0x38 0xf7 - invalid (vex only). */
1861/* Opcode 0xf3 0x0f 0x38 0xf7 - invalid (vex only). */
1862/* Opcode 0xf2 0x0f 0x38 0xf7 - invalid (vex only). */
1863
1864/* Opcode 0x0f 0x38 0xf8 - invalid. */
1865/* Opcode 0x66 0x0f 0x38 0xf8 - invalid. */
1866/* Opcode 0xf3 0x0f 0x38 0xf8 - invalid. */
1867/* Opcode 0xf2 0x0f 0x38 0xf8 - invalid. */
1868
1869/* Opcode 0x0f 0x38 0xf9 - invalid. */
1870/* Opcode 0x66 0x0f 0x38 0xf9 - invalid. */
1871/* Opcode 0xf3 0x0f 0x38 0xf9 - invalid. */
1872/* Opcode 0xf2 0x0f 0x38 0xf9 - invalid. */
1873
1874/* Opcode 0x0f 0x38 0xfa - invalid. */
1875/* Opcode 0x66 0x0f 0x38 0xfa - invalid. */
1876/* Opcode 0xf3 0x0f 0x38 0xfa - invalid. */
1877/* Opcode 0xf2 0x0f 0x38 0xfa - invalid. */
1878
1879/* Opcode 0x0f 0x38 0xfb - invalid. */
1880/* Opcode 0x66 0x0f 0x38 0xfb - invalid. */
1881/* Opcode 0xf3 0x0f 0x38 0xfb - invalid. */
1882/* Opcode 0xf2 0x0f 0x38 0xfb - invalid. */
1883
1884/* Opcode 0x0f 0x38 0xfc - invalid. */
1885/* Opcode 0x66 0x0f 0x38 0xfc - invalid. */
1886/* Opcode 0xf3 0x0f 0x38 0xfc - invalid. */
1887/* Opcode 0xf2 0x0f 0x38 0xfc - invalid. */
1888
1889/* Opcode 0x0f 0x38 0xfd - invalid. */
1890/* Opcode 0x66 0x0f 0x38 0xfd - invalid. */
1891/* Opcode 0xf3 0x0f 0x38 0xfd - invalid. */
1892/* Opcode 0xf2 0x0f 0x38 0xfd - invalid. */
1893
1894/* Opcode 0x0f 0x38 0xfe - invalid. */
1895/* Opcode 0x66 0x0f 0x38 0xfe - invalid. */
1896/* Opcode 0xf3 0x0f 0x38 0xfe - invalid. */
1897/* Opcode 0xf2 0x0f 0x38 0xfe - invalid. */
1898
1899/* Opcode 0x0f 0x38 0xff - invalid. */
1900/* Opcode 0x66 0x0f 0x38 0xff - invalid. */
1901/* Opcode 0xf3 0x0f 0x38 0xff - invalid. */
1902/* Opcode 0xf2 0x0f 0x38 0xff - invalid. */
1903
1904
/**
 * Three byte opcode map, first two bytes are 0x0f 0x38.
 *
 * Four entries per opcode byte, selected by the mandatory prefix in this
 * order: none, 0x66, 0xf3, 0xf2 - 256 opcodes x 4 variants = 1024 entries.
 *
 * @sa g_apfnVexMap2
 */
IEM_STATIC const PFNIEMOP g_apfnThreeByte0f38[] =
{
    /*          no prefix,                  066h prefix                 f3h prefix,                 f2h prefix */
    /* 0x00 */  iemOp_pshufb_Pq_Qq,         iemOp_pshufb_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x01 */  iemOp_phaddw_Pq_Qq,         iemOp_phaddw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x02 */  iemOp_phaddd_Pq_Qq,         iemOp_phaddd_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x03 */  iemOp_phaddsw_Pq_Qq,        iemOp_phaddsw_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x04 */  iemOp_pmaddubsw_Pq_Qq,      iemOp_pmaddubsw_Vx_Wx,      iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x05 */  iemOp_phsubw_Pq_Qq,         iemOp_phsubw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x06 */  iemOp_phsubd_Pq_Qq,         iemOp_phsubd_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x07 */  iemOp_phsubsw_Pq_Qq,        iemOp_phsubsw_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x08 */  iemOp_psignb_Pq_Qq,         iemOp_psignb_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x09 */  iemOp_psignw_Pq_Qq,         iemOp_psignw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x0a */  iemOp_psignd_Pq_Qq,         iemOp_psignd_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x0b */  iemOp_pmulhrsw_Pq_Qq,       iemOp_pmulhrsw_Vx_Wx,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x0c */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0d */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0e */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x0f */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x10 */  iemOp_InvalidNeedRM,        iemOp_pblendvb_Vdq_Wdq,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x11 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x12 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x13 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x14 */  iemOp_InvalidNeedRM,        iemOp_blendvps_Vdq_Wdq,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x15 */  iemOp_InvalidNeedRM,        iemOp_blendvpd_Vdq_Wdq,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x16 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x17 */  iemOp_InvalidNeedRM,        iemOp_ptest_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x18 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x19 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1a */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1b */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x1c */  iemOp_pabsb_Pq_Qq,          iemOp_pabsb_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x1d */  iemOp_pabsw_Pq_Qq,          iemOp_pabsw_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x1e */  iemOp_pabsd_Pq_Qq,          iemOp_pabsd_Vx_Wx,          iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x1f */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x20 */  iemOp_InvalidNeedRM,        iemOp_pmovsxbw_Vx_UxMq,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x21 */  iemOp_InvalidNeedRM,        iemOp_pmovsxbd_Vx_UxMd,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x22 */  iemOp_InvalidNeedRM,        iemOp_pmovsxbq_Vx_UxMw,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x23 */  iemOp_InvalidNeedRM,        iemOp_pmovsxwd_Vx_UxMq,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x24 */  iemOp_InvalidNeedRM,        iemOp_pmovsxwq_Vx_UxMd,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x25 */  iemOp_InvalidNeedRM,        iemOp_pmovsxdq_Vx_UxMq,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x26 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x27 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x28 */  iemOp_InvalidNeedRM,        iemOp_pmuldq_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x29 */  iemOp_InvalidNeedRM,        iemOp_pcmpeqq_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x2a */  iemOp_InvalidNeedRM,        iemOp_movntdqa_Vdq_Mdq,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x2b */  iemOp_InvalidNeedRM,        iemOp_packusdw_Vx_Wx,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x2c */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x2d */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x2e */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x2f */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x30 */  iemOp_InvalidNeedRM,        iemOp_pmovzxbw_Vx_UxMq,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x31 */  iemOp_InvalidNeedRM,        iemOp_pmovzxbd_Vx_UxMd,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x32 */  iemOp_InvalidNeedRM,        iemOp_pmovzxbq_Vx_UxMw,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x33 */  iemOp_InvalidNeedRM,        iemOp_pmovzxwd_Vx_UxMq,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x34 */  iemOp_InvalidNeedRM,        iemOp_pmovzxwq_Vx_UxMd,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x35 */  iemOp_InvalidNeedRM,        iemOp_pmovzxdq_Vx_UxMq,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x36 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x37 */  iemOp_InvalidNeedRM,        iemOp_pcmpgtq_Vx_Wx,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x38 */  iemOp_InvalidNeedRM,        iemOp_pminsb_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x39 */  iemOp_InvalidNeedRM,        iemOp_pminsd_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x3a */  iemOp_InvalidNeedRM,        iemOp_pminuw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x3b */  iemOp_InvalidNeedRM,        iemOp_pminud_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x3c */  iemOp_InvalidNeedRM,        iemOp_pmaxsb_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x3d */  iemOp_InvalidNeedRM,        iemOp_pmaxsd_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x3e */  iemOp_InvalidNeedRM,        iemOp_pmaxuw_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x3f */  iemOp_InvalidNeedRM,        iemOp_pmaxud_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,

    /* 0x40 */  iemOp_InvalidNeedRM,        iemOp_pmulld_Vx_Wx,         iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x41 */  iemOp_InvalidNeedRM,        iemOp_phminposuw_Vdq_Wdq,   iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x42 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x43 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x44 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x45 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x46 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x47 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x48 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x49 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4a */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4b */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4c */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4d */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4e */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x4f */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x50 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x51 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x52 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x53 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x54 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x55 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x56 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x57 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x58 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x59 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x5a */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x5b */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x5c */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x5d */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x5e */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x5f */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x60 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x61 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x62 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x63 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x64 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x65 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x66 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x67 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x68 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x69 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x6a */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x6b */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x6c */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x6d */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x6e */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x6f */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x70 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x71 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x72 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x73 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x74 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x75 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x76 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x77 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x78 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x79 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7a */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7b */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7c */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7d */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7e */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x7f */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x80 */  iemOp_InvalidNeedRM,        iemOp_invept_Gy_Mdq,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x81 */  iemOp_InvalidNeedRM,        iemOp_invvpid_Gy_Mdq,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x82 */  iemOp_InvalidNeedRM,        iemOp_invpcid_Gy_Mdq,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0x83 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x84 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x85 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x86 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x87 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x88 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x89 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8a */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8b */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8c */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8d */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8e */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x8f */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0x90 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x91 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x92 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x93 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x94 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x95 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x96 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x97 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x98 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x99 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9a */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9b */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9c */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9d */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9e */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0x9f */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xa0 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa1 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa2 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa3 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa4 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa5 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa6 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa7 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa8 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xa9 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xaa */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xab */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xac */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xad */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xae */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xaf */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xb0 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb1 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb2 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb3 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb4 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb5 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb6 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb7 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb8 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xb9 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xba */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbb */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbc */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbd */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbe */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xbf */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xc0 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc1 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc2 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc3 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc4 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc5 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc6 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc7 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xc8 */  iemOp_sha1nexte_Vdq_Wdq,    iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xc9 */  iemOp_sha1msg1_Vdq_Wdq,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xca */  iemOp_sha1msg2_Vdq_Wdq,     iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xcb */  iemOp_sha256rnds2_Vdq_Wdq,  iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xcc */  iemOp_sha256msg1_Vdq_Wdq,   iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xcd */  iemOp_sha256msg2_Vdq_Wdq,   iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xce */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xcf */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xd0 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd1 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd2 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd3 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd4 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd5 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd6 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd7 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd8 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xd9 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xda */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xdb */  iemOp_InvalidNeedRM,        iemOp_aesimc_Vdq_Wdq,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xdc */  iemOp_InvalidNeedRM,        iemOp_aesenc_Vdq_Wdq,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xdd */  iemOp_InvalidNeedRM,        iemOp_aesenclast_Vdq_Wdq,   iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xde */  iemOp_InvalidNeedRM,        iemOp_aesdec_Vdq_Wdq,       iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,
    /* 0xdf */  iemOp_InvalidNeedRM,        iemOp_aesdeclast_Vdq_Wdq,   iemOp_InvalidNeedRM,        iemOp_InvalidNeedRM,

    /* 0xe0 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe1 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe2 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe3 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe4 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe5 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe6 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe7 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe8 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xe9 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xea */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xeb */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xec */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xed */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xee */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xef */  IEMOP_X4(iemOp_InvalidNeedRM),

    /* 0xf0 */  iemOp_movbe_Gv_Mv,          iemOp_movbe_Gv_Mv,          iemOp_InvalidNeedRM,        iemOp_crc32_Gd_Eb,
    /* 0xf1 */  iemOp_movbe_Mv_Gv,          iemOp_movbe_Mv_Gv,          iemOp_InvalidNeedRM,        iemOp_crc32_Gv_Ev,
    /* 0xf2 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xf3 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xf4 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xf5 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xf6 */  iemOp_InvalidNeedRM,        iemOp_adcx_Gy_Ey,           iemOp_adox_Gy_Ey,           iemOp_InvalidNeedRM,
    /* 0xf7 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xf8 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xf9 */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xfa */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xfb */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xfc */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xfd */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xfe */  IEMOP_X4(iemOp_InvalidNeedRM),
    /* 0xff */  IEMOP_X4(iemOp_InvalidNeedRM),
};
AssertCompile(RT_ELEMENTS(g_apfnThreeByte0f38) == 1024); /* 256 opcodes x 4 prefix variants */
2185
2186/** @} */
2187
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette