VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstThree0f3a.cpp.h @ 102012

Last change on this file since 102012 was 102012, checked in by vboxsync, 13 months ago:

VMM/IEM: If we use structure variables in MC blocks, we need special fetch and store MCs for them, or it won't be possible to recompile the code (variable references are translated to uint8_t indexes by name, so no subfield access is possible). So, added some variable checking to tstIEMCheckMc and addressed the issues found. (There is more to do here, but tomorrow.) bugref:10371
/* $Id: IEMAllInstThree0f3a.cpp.h 102012 2023-11-09 02:09:51Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation, 0x0f 0x3a map.
 *
 * @remarks IEMAllInstVexMap3.cpp.h is a VEX mirror of this file.
 *          Any update here is likely needed in that file too.
 */

/*
 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */


/** @name Three byte opcodes with first two bytes 0x0f 0x3a
 * @{
 */

/**
 * Common worker for SSSE3 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128, imm8
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSSE3 cpuid checks.
 *
 * @sa  iemOpCommonSse41_FullFullImm8_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSsse3_FullFullImm8_To_Full, PFNIEMAIMPLMEDIAOPTF2U128IMM8, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSsse3);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

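        /* Note: the effective address is decoded before the imm8 byte is fetched
           since the ModRM/SIB/displacement bytes precede the immediate in the
           instruction stream; the trailing '1' tells the address calculation that
           one immediate byte still follows (relevant for RIP-relative operands). */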
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSsse3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE 4.1 instructions on the forms:
 *      pxxx    xmm1, xmm2/mem128, imm8
 *
 * Proper alignment of the 128-bit operand is enforced.
 * No SIMD exceptions. SSE 4.1 cpuid checks.
 *
 * @sa  iemOpCommonSsse3_FullFullImm8_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse41_FullFullImm8_To_Full, PFNIEMAIMPLMEDIAOPTF2U128IMM8, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM, imm8
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128], imm8.
         */
        IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE 4.1 instructions of the form:
 *      xxx     xmm1, xmm2/mem128, imm8
 *
 * Proper alignment of the 128-bit operand is enforced.
 * MXCSR is used as input and output.
 * Exceptions type 4. SSE 4.1 cpuid checks.
 *
 * @sa  iemOpCommonSse41_FullFullImm8_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonSse41Fp_FullFullImm8_To_Full, PFNIEMAIMPLMXCSRF2XMMIMM8, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM, imm8.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_MXCSR(pfMxcsr);

        IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(pfnU128, pfMxcsr, pDst, pSrc, bImmArg);
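        /* The helper has only updated the locals so far; raise any unmasked SIMD
           FP exception (#XM) it flagged in MXCSR before committing the result, so
           a faulting instruction leaves the destination register untouched. */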
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem128], imm8.
         */
        IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_XMM_ALIGN_SSE_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_CALL_VOID_AIMPL_4(pfnU128, pfMxcsr, pDst, pSrc, bImmArg);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM(IEM_GET_MODRM_REG(pVCpu, bRm), Dst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/**
 * Common worker for SSE-style AES-NI instructions of the form:
 *      aesxxx  xmm1, xmm2/mem128, imm8
 *
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. AES-NI cpuid checks.
 *
 * @sa  iemOpCommonSsse3_FullFullImm8_To_Full
 * @sa  iemOpCommonSse41_FullFullImm8_To_Full
 */
FNIEMOP_DEF_1(iemOpCommonAesNi_FullFullImm8_To_Full, PFNIEMAIMPLMEDIAOPTF2U128IMM8, pfnU128)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fAesNi);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fAesNi);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(pfnU128, puDst, puSrc, bImmArg);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode 0x66 0x0f 0x00 - invalid (vex only). */
/** Opcode 0x66 0x0f 0x01 - invalid (vex only). */
/** Opcode 0x66 0x0f 0x02 - invalid (vex only). */
/* Opcode 0x66 0x0f 0x03 - invalid */
/** Opcode 0x66 0x0f 0x04 - invalid (vex only). */
/** Opcode 0x66 0x0f 0x05 - invalid (vex only). */
/* Opcode 0x66 0x0f 0x06 - invalid (vex only) */
/* Opcode 0x66 0x0f 0x07 - invalid */
/** Opcode 0x66 0x0f 0x08. */
FNIEMOP_DEF(iemOp_roundps_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(RMI, ROUNDPS, roundps, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
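    /* IEM_SELECT_HOST_OR_FALLBACK resolves to the assembly helper when the host
       CPU itself has the feature (SSE 4.1 here) and to the portable C fallback
       otherwise. */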
    return FNIEMOP_CALL_1(iemOpCommonSse41Fp_FullFullImm8_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_roundps_u128, iemAImpl_roundps_u128_fallback));
}


/** Opcode 0x66 0x0f 0x09. */
FNIEMOP_DEF(iemOp_roundpd_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(RMI, ROUNDPD, roundpd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse41Fp_FullFullImm8_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_roundpd_u128, iemAImpl_roundpd_u128_fallback));
}


/** Opcode 0x66 0x0f 0x0a. */
FNIEMOP_DEF(iemOp_roundss_Vss_Wss_Ib)
{
    /* The instruction form is very similar to CMPSS. */
    IEMOP_MNEMONIC3(RMI, ROUNDSS, roundss, Vss, Wss, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

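    /* imm8 rounding control: bits 1:0 select the rounding mode, a set bit 2
       means use MXCSR.RC instead, and bit 3 suppresses the precision exception. */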
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM32, XMM32.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_roundss_u128, pfMxcsr, pDst, pSrc, bImmArg);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM32, [mem32].
         */
        IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_XMM_U32_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
                                              0 /*a_iDword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_roundss_u128, pfMxcsr, pDst, pSrc, bImmArg);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, Dst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

/** Opcode 0x66 0x0f 0x0b. */
FNIEMOP_DEF(iemOp_roundsd_Vsd_Wsd_Ib)
{
    /* The instruction form is very similar to CMPSD. */
    IEMOP_MNEMONIC3(RMI, ROUNDSD, roundsd, Vsd, Wsd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM64, XMM64, imm8.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(4, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_roundsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM64, [mem64], imm8.
         */
        IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(IEMMEDIAF2XMMSRC, Src);
        IEM_MC_LOCAL(X86XMMREG, Dst);
        IEM_MC_ARG(uint32_t *, pfMxcsr, 0);
        IEM_MC_ARG_LOCAL_REF(PX86XMMREG, pDst, Dst, 1);
        IEM_MC_ARG_LOCAL_REF(PCIEMMEDIAF2XMMSRC, pSrc, Src, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_XMM_U64_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
                                              0 /*a_iQword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_MXCSR(pfMxcsr);
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_roundsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
        IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
        IEM_MC_STORE_XREG_XMM_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iQword*/, Dst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode 0x66 0x0f 0x0c. */
FNIEMOP_DEF(iemOp_blendps_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(RMI, BLENDPS, blendps, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFullImm8_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_blendps_u128, iemAImpl_blendps_u128_fallback));
}


/** Opcode 0x66 0x0f 0x0d. */
FNIEMOP_DEF(iemOp_blendpd_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(RMI, BLENDPD, blendpd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFullImm8_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_blendpd_u128, iemAImpl_blendpd_u128_fallback));
}


/** Opcode 0x66 0x0f 0x0e. */
FNIEMOP_DEF(iemOp_pblendw_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(RMI, PBLENDW, pblendw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFullImm8_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_pblendw_u128, iemAImpl_pblendw_u128_fallback));
}


/** Opcode 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_palignr_Pq_Qq_Ib)
{
    IEMOP_MNEMONIC3(RMI, PALIGNR, palignr, Pq, Qq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
        /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSsse3);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_PREPARE_FPU_USAGE();
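        /* As with all MMX instructions, this switches the x87 unit into MMX mode:
           the FPU tag word becomes all-valid and the top-of-stack is cleared. */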
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_FETCH_MREG_U64(uSrc, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_palignr_u64, iemAImpl_palignr_u64_fallback),
                                 pDst, uSrc, bImmArg);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint64_t *, pDst, 0);
        IEM_MC_ARG(uint64_t, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSsse3);
        IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_FPU_TO_MMX_MODE();
        IEM_MC_REF_MREG_U64(pDst, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_palignr_u64, iemAImpl_palignr_u64_fallback),
                                 pDst, uSrc, bImmArg);
        IEM_MC_MODIFIED_MREG_BY_REF(pDst);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode 0x66 0x0f 0x0f. */
FNIEMOP_DEF(iemOp_palignr_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(RMI, PALIGNR, palignr, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSsse3_FullFullImm8_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSsse3, iemAImpl_palignr_u128, iemAImpl_palignr_u128_fallback));
}


/* Opcode 0x66 0x0f 0x10 - invalid */
/* Opcode 0x66 0x0f 0x11 - invalid */
/* Opcode 0x66 0x0f 0x12 - invalid */
/* Opcode 0x66 0x0f 0x13 - invalid */


/** Opcode 0x66 0x0f 0x14. */
FNIEMOP_DEF(iemOp_pextrb_RdMb_Vdq_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC3(MRI, PEXTRB, pextrb, Ev, Vq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * greg32, XMM.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
        IEM_MC_LOCAL(uint8_t, uValue);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_XREG_U8(uValue, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 15 /*a_iByte*/);
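        /* The 32-bit store zero-extends the byte; in 64-bit mode the 32-bit write
           also clears the upper half of the destination register. */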
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uValue);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem8], XMM.
         */
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint8_t, uValue);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_XREG_U8(uValue, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 15 /*a_iByte*/);
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uValue);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode 0x66 0x0f 0x15. */
FNIEMOP_DEF(iemOp_pextrw_RdMw_Vdq_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC3(MRI, PEXTRW, pextrw, Ev, Vq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * greg32, XMM.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
        IEM_MC_LOCAL(uint16_t, uValue);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_XREG_U16(uValue, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7 /*a_iWord*/);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uValue);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem16], XMM.
         */
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint16_t, uValue);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_XREG_U16(uValue, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 7 /*a_iWord*/);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uValue);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


FNIEMOP_DEF(iemOp_pextrd_q_RdMw_Vdq_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x16
         * @opcodesub   rex.w=1
         * @oppfx       0x66
         * @opcpuid     sse
         */
        IEMOP_MNEMONIC3(MRI, PEXTRQ, pextrq, Ev, Vq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * greg64, XMM.
             */
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
            IEM_MC_LOCAL(uint64_t, uSrc);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE();
            IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1 /*a_iQword*/);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * [mem64], XMM.
             */
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(uint64_t, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE();

            IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1 /*a_iQword*/);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x16
         * @opcodesub   rex.w=0
         * @oppfx       0x66
         * @opcpuid     sse
         */
        IEMOP_MNEMONIC3(MRI, PEXTRD, pextrd, Ey, Vd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * greg32, XMM.
             */
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
            IEM_MC_LOCAL(uint32_t, uSrc);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE();
            IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3 /*a_iDword*/);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * [mem32], XMM.
             */
            IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(uint32_t, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE();
            IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3 /*a_iDword*/);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}


/** Opcode 0x66 0x0f 0x17. */
FNIEMOP_DEF(iemOp_extractps_Ed_Vdq_Ib)
{
    IEMOP_MNEMONIC3(MRI, EXTRACTPS, extractps, Ed, Vdq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * greg32, XMM.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3 /*a_iDword*/);
        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * [mem32], XMM.
         */
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3 /*a_iDword*/);
        IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/* Opcode 0x66 0x0f 0x18 - invalid (vex only). */
/* Opcode 0x66 0x0f 0x19 - invalid (vex only). */
/* Opcode 0x66 0x0f 0x1a - invalid */
/* Opcode 0x66 0x0f 0x1b - invalid */
/* Opcode 0x66 0x0f 0x1c - invalid */
/* Opcode 0x66 0x0f 0x1d - invalid (vex only). */
/* Opcode 0x66 0x0f 0x1e - invalid */
/* Opcode 0x66 0x0f 0x1f - invalid */


/** Opcode 0x66 0x0f 0x20. */
FNIEMOP_DEF(iemOp_pinsrb_Vdq_RyMb_Ib)
{
    IEMOP_MNEMONIC3(RMI, PINSRB, pinsrb, Vd, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OP_SIZES);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, greg32.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
        IEM_MC_LOCAL(uint8_t, uSrc);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_GREG_U8(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_XREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 15 /*a_iByte*/, uSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem8].
         */
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(uint8_t, uSrc);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U8(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_STORE_XREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 15 /*a_iByte*/, uSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

/** Opcode 0x66 0x0f 0x21. */
FNIEMOP_DEF(iemOp_insertps_Vdq_UdqMd_Ib)
{
    IEMOP_MNEMONIC3(RMI, INSERTPS, insertps, Vdq, Wdq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0); /// @todo
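    /* imm8 layout: bits 7:6 select the source dword (register source only, a
       memory source is always a single dword), bits 5:4 select the destination
       dword, and bits 3:0 form a mask of destination dwords to zero. */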
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * XMM, XMM.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), (bImm >> 6) & 3);
        IEM_MC_CLEAR_XREG_U32_MASK(IEM_GET_MODRM_REG(pVCpu, bRm), bImm);
        IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), (bImm >> 4) & 3, uSrc);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * XMM, [mem32].
         */
        IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);

        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_LOCAL(uint32_t, uSrc);
        IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_CLEAR_XREG_U32_MASK(IEM_GET_MODRM_REG(pVCpu, bRm), bImm);
        IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), (bImm >> 4) & 3, uSrc);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}

FNIEMOP_DEF(iemOp_pinsrd_q_Vdq_Ey_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        /**
         * @opcode      0x22
         * @opcodesub   rex.w=1
         * @oppfx       0x66
         * @opcpuid     sse
         */
        IEMOP_MNEMONIC3(RMI, PINSRQ, pinsrq, Vq, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * XMM, greg64.
             */
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
            IEM_MC_LOCAL(uint64_t, uSrc);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE();
            IEM_MC_FETCH_GREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1 /*a_iQword*/, uSrc);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * XMM, [mem64].
             */
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(uint64_t, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE();

            IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 1 /*a_iQword*/, uSrc);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        /**
         * @opdone
         * @opcode      0x22
         * @opcodesub   rex.w=0
         * @oppfx       0x66
         * @opcpuid     sse
         */
        IEMOP_MNEMONIC3(RMI, PINSRD, pinsrd, Vd, Ey, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, IEMOPHINT_IGNORES_OZ_PFX);
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * XMM, greg32.
             */
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_BEGIN(0, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
            IEM_MC_LOCAL(uint32_t, uSrc);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE();
            IEM_MC_FETCH_GREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3 /*a_iDword*/, uSrc);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * XMM, [mem32].
             */
            IEM_MC_BEGIN(0, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_LOCAL(uint32_t, uSrc);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE();

            IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), bImm & 3 /*a_iDword*/, uSrc);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}


/* Opcode 0x66 0x0f 0x23 - invalid */
/* Opcode 0x66 0x0f 0x24 - invalid */
/* Opcode 0x66 0x0f 0x25 - invalid */
/* Opcode 0x66 0x0f 0x26 - invalid */
/* Opcode 0x66 0x0f 0x27 - invalid */
/* Opcode 0x66 0x0f 0x28 - invalid */
/* Opcode 0x66 0x0f 0x29 - invalid */
/* Opcode 0x66 0x0f 0x2a - invalid */
/* Opcode 0x66 0x0f 0x2b - invalid */
/* Opcode 0x66 0x0f 0x2c - invalid */
/* Opcode 0x66 0x0f 0x2d - invalid */
/* Opcode 0x66 0x0f 0x2e - invalid */
/* Opcode 0x66 0x0f 0x2f - invalid */


/* Opcode 0x66 0x0f 0x30 - invalid */
/* Opcode 0x66 0x0f 0x31 - invalid */
/* Opcode 0x66 0x0f 0x32 - invalid */
/* Opcode 0x66 0x0f 0x33 - invalid */
/* Opcode 0x66 0x0f 0x34 - invalid */
/* Opcode 0x66 0x0f 0x35 - invalid */
/* Opcode 0x66 0x0f 0x36 - invalid */
/* Opcode 0x66 0x0f 0x37 - invalid */
/* Opcode 0x66 0x0f 0x38 - invalid (vex only). */
/* Opcode 0x66 0x0f 0x39 - invalid (vex only). */
/* Opcode 0x66 0x0f 0x3a - invalid */
/* Opcode 0x66 0x0f 0x3b - invalid */
/* Opcode 0x66 0x0f 0x3c - invalid */
/* Opcode 0x66 0x0f 0x3d - invalid */
/* Opcode 0x66 0x0f 0x3e - invalid */
/* Opcode 0x66 0x0f 0x3f - invalid */


/** Opcode 0x66 0x0f 0x40. */
FNIEMOP_DEF(iemOp_dpps_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(RMI, DPPS, dpps, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
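    /* imm8: the high nibble selects which element pairs enter the dot product,
       the low nibble selects which destination elements receive the sum (the
       rest are zeroed). */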
    return FNIEMOP_CALL_1(iemOpCommonSse41Fp_FullFullImm8_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_dpps_u128, iemAImpl_dpps_u128_fallback));
}


/** Opcode 0x66 0x0f 0x41. */
FNIEMOP_DEF(iemOp_dppd_Vdq_Wdq_Ib)
{
    IEMOP_MNEMONIC3(RMI, DPPD, dppd, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse41Fp_FullFullImm8_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_dppd_u128, iemAImpl_dppd_u128_fallback));
}


/** Opcode 0x66 0x0f 0x42. */
FNIEMOP_DEF(iemOp_mpsadbw_Vx_Wx_Ib)
{
    IEMOP_MNEMONIC3(RMI, MPSADBW, mpsadbw, Vx, Wx, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonSse41_FullFullImm8_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fSse41, iemAImpl_mpsadbw_u128, iemAImpl_mpsadbw_u128_fallback));
}


/* Opcode 0x66 0x0f 0x43 - invalid */


/** Opcode 0x66 0x0f 0x44. */
FNIEMOP_DEF(iemOp_pclmulqdq_Vdq_Wdq_Ib)
{
    IEMOP_MNEMONIC3(RMI, PCLMULQDQ, pclmulqdq, Vdq, Wdq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

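    /* imm8 picks the carry-less multiply inputs: bit 0 selects the qword of the
       destination operand, bit 4 the qword of the source operand. */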
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fPclMul);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fPclMul,
                                                             iemAImpl_pclmulqdq_u128,
                                                             iemAImpl_pclmulqdq_u128_fallback),
                                 puDst, puSrc, bImmArg);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_LOCAL(RTUINT128U, uSrc);
        IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fPclMul);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fPclMul,
                                                             iemAImpl_pclmulqdq_u128,
                                                             iemAImpl_pclmulqdq_u128_fallback),
                                 puDst, puSrc, bImmArg);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/* Opcode 0x66 0x0f 0x45 - invalid */
/* Opcode 0x66 0x0f 0x46 - invalid (vex only) */
/* Opcode 0x66 0x0f 0x47 - invalid */
/* Opcode 0x66 0x0f 0x48 - invalid */
/* Opcode 0x66 0x0f 0x49 - invalid */
/* Opcode 0x66 0x0f 0x4a - invalid (vex only). */
/* Opcode 0x66 0x0f 0x4b - invalid (vex only). */
/* Opcode 0x66 0x0f 0x4c - invalid (vex only). */
/* Opcode 0x66 0x0f 0x4d - invalid */
/* Opcode 0x66 0x0f 0x4e - invalid */
/* Opcode 0x66 0x0f 0x4f - invalid */


/* Opcode 0x66 0x0f 0x50 - invalid */
/* Opcode 0x66 0x0f 0x51 - invalid */
/* Opcode 0x66 0x0f 0x52 - invalid */
/* Opcode 0x66 0x0f 0x53 - invalid */
/* Opcode 0x66 0x0f 0x54 - invalid */
/* Opcode 0x66 0x0f 0x55 - invalid */
/* Opcode 0x66 0x0f 0x56 - invalid */
/* Opcode 0x66 0x0f 0x57 - invalid */
/* Opcode 0x66 0x0f 0x58 - invalid */
/* Opcode 0x66 0x0f 0x59 - invalid */
/* Opcode 0x66 0x0f 0x5a - invalid */
/* Opcode 0x66 0x0f 0x5b - invalid */
/* Opcode 0x66 0x0f 0x5c - invalid */
/* Opcode 0x66 0x0f 0x5d - invalid */
/* Opcode 0x66 0x0f 0x5e - invalid */
/* Opcode 0x66 0x0f 0x5f - invalid */


/** Opcode 0x66 0x0f 0x60. */
FNIEMOP_DEF(iemOp_pcmpestrm_Vdq_Wdq_Ib)
{
    IEMOP_MNEMONIC3(RMI, PCMPESTRM, pcmpestrm, Vdq, Wdq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
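    /* REX.W decides where the explicit string lengths come from: RAX/RDX as
       64-bit values when set, EAX/EDX sign-extended to 64 bits when clear. */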
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * Register, register.
             */
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_BEGIN(4, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse42);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_LOCAL(IEMPCMPESTRXSRC, Src);
            IEM_MC_ARG_LOCAL_REF(PIEMPCMPESTRXSRC, pSrc, Src, 2);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE();
            IEM_MC_FETCH_XREG_PAIR_U128_AND_RAX_RDX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_REF_XREG_U128(puDst, 0 /*xmm0*/);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fSse42,
                                                                 iemAImpl_pcmpestrm_u128,
                                                                 iemAImpl_pcmpestrm_u128_fallback),
                                     puDst, pEFlags, pSrc, bImmArg);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * Register, memory.
             */
            IEM_MC_BEGIN(4, 3, IEM_MC_F_64BIT, 0);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_LOCAL(IEMPCMPESTRXSRC, Src);
            IEM_MC_ARG_LOCAL_REF(PIEMPCMPESTRXSRC, pSrc, Src, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse42);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE();

            IEM_MC_FETCH_MEM_U128_AND_XREG_U128_AND_RAX_RDX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
                                                                pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, 0 /*xmm0*/);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fSse42,
                                                                 iemAImpl_pcmpestrm_u128,
                                                                 iemAImpl_pcmpestrm_u128_fallback),
                                     puDst, pEFlags, pSrc, bImmArg);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * Register, register.
             */
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_BEGIN(4, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse42);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_LOCAL(IEMPCMPESTRXSRC, Src);
            IEM_MC_ARG_LOCAL_REF(PIEMPCMPESTRXSRC, pSrc, Src, 2);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE();
            IEM_MC_FETCH_XREG_PAIR_U128_AND_EAX_EDX_U32_SX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_REF_XREG_U128(puDst, 0 /*xmm0*/);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fSse42,
                                                                 iemAImpl_pcmpestrm_u128,
                                                                 iemAImpl_pcmpestrm_u128_fallback),
                                     puDst, pEFlags, pSrc, bImmArg);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * Register, memory.
             */
            IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_ARG(PRTUINT128U, puDst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_LOCAL(IEMPCMPESTRXSRC, Src);
            IEM_MC_ARG_LOCAL_REF(PIEMPCMPESTRXSRC, pSrc, Src, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse42);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE();

            IEM_MC_FETCH_MEM_U128_AND_XREG_U128_AND_EAX_EDX_U32_SX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
                                                                       pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_XREG_U128(puDst, 0 /*xmm0*/);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fSse42,
                                                                 iemAImpl_pcmpestrm_u128,
                                                                 iemAImpl_pcmpestrm_u128_fallback),
                                     puDst, pEFlags, pSrc, bImmArg);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}


/** Opcode 0x66 0x0f 0x61. */
FNIEMOP_DEF(iemOp_pcmpestri_Vdq_Wdq_Ib)
{
    IEMOP_MNEMONIC3(RMI, PCMPESTRI, pcmpestri, Vdq, Wdq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * Register, register.
             */
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_BEGIN(4, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse42);
            IEM_MC_ARG(uint32_t *, pu32Ecx, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_LOCAL(IEMPCMPESTRXSRC, Src);
            IEM_MC_ARG_LOCAL_REF(PIEMPCMPESTRXSRC, pSrc, Src, 2);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE();
            IEM_MC_REF_GREG_U32(pu32Ecx, X86_GREG_xCX);
            IEM_MC_FETCH_XREG_PAIR_U128_AND_RAX_RDX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fSse42,
                                                                 iemAImpl_pcmpestri_u128,
                                                                 iemAImpl_pcmpestri_u128_fallback),
                                     pu32Ecx, pEFlags, pSrc, bImmArg);
            /** @todo testcase: High dword of RCX cleared? */
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * Register, memory.
             */
            IEM_MC_BEGIN(4, 3, IEM_MC_F_64BIT, 0);
            IEM_MC_ARG(uint32_t *, pu32Ecx, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_LOCAL(IEMPCMPESTRXSRC, Src);
            IEM_MC_ARG_LOCAL_REF(PIEMPCMPESTRXSRC, pSrc, Src, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse42);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE();

            IEM_MC_FETCH_MEM_U128_AND_XREG_U128_AND_RAX_RDX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
                                                                pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_GREG_U32(pu32Ecx, X86_GREG_xCX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fSse42,
                                                                 iemAImpl_pcmpestri_u128,
                                                                 iemAImpl_pcmpestri_u128_fallback),
                                     pu32Ecx, pEFlags, pSrc, bImmArg);
            /** @todo testcase: High dword of RCX cleared? */
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * Register, register.
             */
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_BEGIN(4, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse42);
            IEM_MC_ARG(uint32_t *, pu32Ecx, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_LOCAL(IEMPCMPESTRXSRC, Src);
            IEM_MC_ARG_LOCAL_REF(PIEMPCMPESTRXSRC, pSrc, Src, 2);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE();
            IEM_MC_REF_GREG_U32(pu32Ecx, X86_GREG_xCX);
            IEM_MC_FETCH_XREG_PAIR_U128_AND_EAX_EDX_U32_SX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fSse42,
                                                                 iemAImpl_pcmpestri_u128,
                                                                 iemAImpl_pcmpestri_u128_fallback),
                                     pu32Ecx, pEFlags, pSrc, bImmArg);
            /** @todo testcase: High dword of RCX cleared? */
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * Register, memory.
             */
            IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
            IEM_MC_ARG(uint32_t *, pu32Ecx, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_LOCAL(IEMPCMPESTRXSRC, Src);
            IEM_MC_ARG_LOCAL_REF(PIEMPCMPESTRXSRC, pSrc, Src, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
            uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
            IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse42);
            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
            IEM_MC_PREPARE_SSE_USAGE();

            IEM_MC_FETCH_MEM_U128_AND_XREG_U128_AND_EAX_EDX_U32_SX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
                                                                       pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_REF_GREG_U32(pu32Ecx, X86_GREG_xCX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fSse42,
                                                                 iemAImpl_pcmpestri_u128,
                                                                 iemAImpl_pcmpestri_u128_fallback),
                                     pu32Ecx, pEFlags, pSrc, bImmArg);
            /** @todo testcase: High dword of RCX cleared? */
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
}


/** Opcode 0x66 0x0f 0x62. */
FNIEMOP_DEF(iemOp_pcmpistrm_Vdq_Wdq_Ib)
{
    IEMOP_MNEMONIC3(RMI, PCMPISTRM, pcmpistrm, Vdq, Wdq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(4, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse42);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint32_t *, pEFlags, 1);
        IEM_MC_LOCAL(IEMPCMPISTRXSRC, Src);
        IEM_MC_ARG_LOCAL_REF(PIEMPCMPISTRXSRC, pSrc, Src, 2);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_FETCH_XREG_PAIR_U128(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_XREG_U128(puDst, 0 /*xmm0*/);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fSse42,
                                                             iemAImpl_pcmpistrm_u128,
                                                             iemAImpl_pcmpistrm_u128_fallback),
                                 puDst, pEFlags, pSrc, bImmArg);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(PRTUINT128U, puDst, 0);
        IEM_MC_ARG(uint32_t *, pEFlags, 1);
        IEM_MC_LOCAL(IEMPCMPISTRXSRC, Src);
        IEM_MC_ARG_LOCAL_REF(PIEMPCMPISTRXSRC, pSrc, Src, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse42);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U128_AND_XREG_U128(Src, IEM_GET_MODRM_REG(pVCpu, bRm), pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_XREG_U128(puDst, 0 /*xmm0*/);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fSse42,
                                                             iemAImpl_pcmpistrm_u128,
                                                             iemAImpl_pcmpistrm_u128_fallback),
                                 puDst, pEFlags, pSrc, bImmArg);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/** Opcode 0x66 0x0f 0x63. */
FNIEMOP_DEF(iemOp_pcmpistri_Vdq_Wdq_Ib)
{
    IEMOP_MNEMONIC3(RMI, PCMPISTRI, pcmpistri, Vdq, Wdq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /*
         * Register, register.
         */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_BEGIN(4, 1, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse42);
        IEM_MC_ARG(uint32_t *, pu32Ecx, 0);
        IEM_MC_ARG(uint32_t *, pEFlags, 1);
        IEM_MC_LOCAL(IEMPCMPISTRXSRC, Src);
        IEM_MC_ARG_LOCAL_REF(PIEMPCMPISTRXSRC, pSrc, Src, 2);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();
        IEM_MC_REF_GREG_U32(pu32Ecx, X86_GREG_xCX);
        IEM_MC_FETCH_XREG_PAIR_U128(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fSse42,
                                                             iemAImpl_pcmpistri_u128,
                                                             iemAImpl_pcmpistri_u128_fallback),
                                 pu32Ecx, pEFlags, pSrc, bImmArg);
        /** @todo testcase: High dword of RCX cleared? */
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * Register, memory.
         */
        IEM_MC_BEGIN(4, 3, IEM_MC_F_NOT_286_OR_OLDER, 0);
        IEM_MC_ARG(uint32_t *, pu32Ecx, 0);
        IEM_MC_ARG(uint32_t *, pEFlags, 1);
        IEM_MC_LOCAL(IEMPCMPISTRXSRC, Src);
        IEM_MC_ARG_LOCAL_REF(PIEMPCMPISTRXSRC, pSrc, Src, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
        IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse42);
        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
        IEM_MC_PREPARE_SSE_USAGE();

        IEM_MC_FETCH_MEM_U128_AND_XREG_U128(Src, IEM_GET_MODRM_REG(pVCpu, bRm), pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
        IEM_MC_REF_GREG_U32(pu32Ecx, X86_GREG_xCX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fSse42,
                                                             iemAImpl_pcmpistri_u128,
                                                             iemAImpl_pcmpistri_u128_fallback),
                                 pu32Ecx, pEFlags, pSrc, bImmArg);
        /** @todo testcase: High dword of RCX cleared? */
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}


/* Opcode 0x66 0x0f 0x64 - invalid */
/* Opcode 0x66 0x0f 0x65 - invalid */
/* Opcode 0x66 0x0f 0x66 - invalid */
/* Opcode 0x66 0x0f 0x67 - invalid */
/* Opcode 0x66 0x0f 0x68 - invalid */
/* Opcode 0x66 0x0f 0x69 - invalid */
/* Opcode 0x66 0x0f 0x6a - invalid */
/* Opcode 0x66 0x0f 0x6b - invalid */
/* Opcode 0x66 0x0f 0x6c - invalid */
/* Opcode 0x66 0x0f 0x6d - invalid */
/* Opcode 0x66 0x0f 0x6e - invalid */
/* Opcode 0x66 0x0f 0x6f - invalid */

/* Opcodes 0x0f 0x70 thru 0x0f 0xb0 are unused. */


/* Opcode 0x0f 0xc0 - invalid */
/* Opcode 0x0f 0xc1 - invalid */
/* Opcode 0x0f 0xc2 - invalid */
/* Opcode 0x0f 0xc3 - invalid */
/* Opcode 0x0f 0xc4 - invalid */
/* Opcode 0x0f 0xc5 - invalid */
/* Opcode 0x0f 0xc6 - invalid */
/* Opcode 0x0f 0xc7 - invalid */
/* Opcode 0x0f 0xc8 - invalid */
/* Opcode 0x0f 0xc9 - invalid */
/* Opcode 0x0f 0xca - invalid */
/* Opcode 0x0f 0xcb - invalid */


/* Opcode 0x0f 0xcc */
FNIEMOP_DEF(iemOp_sha1rnds4_Vdq_Wdq_Ib)
{
    IEMOP_MNEMONIC3(RMI, SHA1RNDS4, sha1rnds4, Vdq, Wdq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);

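    /* imm8 bits 1:0 select which of the four SHA-1 round groups' logical
       function and constant K are applied to this block of four rounds. */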
1539 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1540 if (IEM_IS_MODRM_REG_MODE(bRm))
1541 {
1542 /*
1543 * XMM, XMM, imm8
1544 */
1545 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
1546 IEM_MC_BEGIN(3, 0, IEM_MC_F_NOT_286_OR_OLDER, 0);
1547 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSha);
1548 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1549 IEM_MC_ARG(PCRTUINT128U, puSrc, 1);
1550 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
1551 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1552 IEM_MC_PREPARE_SSE_USAGE();
1553 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1554 IEM_MC_REF_XREG_U128_CONST(puSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
1555 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fSha,
1556 iemAImpl_sha1rnds4_u128,
1557 iemAImpl_sha1rnds4_u128_fallback),
1558 puDst, puSrc, bImmArg);
1559 IEM_MC_ADVANCE_RIP_AND_FINISH();
1560 IEM_MC_END();
1561 }
1562 else
1563 {
1564 /*
1565 * XMM, [mem128], imm8.
1566 */
1567 IEM_MC_BEGIN(3, 2, IEM_MC_F_NOT_286_OR_OLDER, 0);
1568 IEM_MC_ARG(PRTUINT128U, puDst, 0);
1569 IEM_MC_LOCAL(RTUINT128U, uSrc);
1570 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1);
1571 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1572
1573 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
1574 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
1575 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 2);
1576 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSha);
1577 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
1578 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1579
1580 IEM_MC_PREPARE_SSE_USAGE();
1581 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
1582 IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fSha,
1583 iemAImpl_sha1rnds4_u128,
1584 iemAImpl_sha1rnds4_u128_fallback),
1585 puDst, puSrc, bImmArg);
1586 IEM_MC_ADVANCE_RIP_AND_FINISH();
1587 IEM_MC_END();
1588 }
1589}
1590
1591
1592/* Opcode 0x0f 0xcd - invalid */
1593/* Opcode 0x0f 0xce - invalid */
1594/* Opcode 0x0f 0xcf - invalid */
1595
1596
/* Opcode 0x66 0x0f 0xd0 - invalid */
/* Opcode 0x66 0x0f 0xd1 - invalid */
/* Opcode 0x66 0x0f 0xd2 - invalid */
/* Opcode 0x66 0x0f 0xd3 - invalid */
/* Opcode 0x66 0x0f 0xd4 - invalid */
/* Opcode 0x66 0x0f 0xd5 - invalid */
/* Opcode 0x66 0x0f 0xd6 - invalid */
/* Opcode 0x66 0x0f 0xd7 - invalid */
/* Opcode 0x66 0x0f 0xd8 - invalid */
/* Opcode 0x66 0x0f 0xd9 - invalid */
/* Opcode 0x66 0x0f 0xda - invalid */
/* Opcode 0x66 0x0f 0xdb - invalid */
/* Opcode 0x66 0x0f 0xdc - invalid */
/* Opcode 0x66 0x0f 0xdd - invalid */
/* Opcode 0x66 0x0f 0xde - invalid */


/* Opcode 0x66 0x0f 0xdf - (aeskeygenassist). */
FNIEMOP_DEF(iemOp_aeskeygen_Vdq_Wdq_Ib)
{
    IEMOP_MNEMONIC3(RMI, AESKEYGEN, aeskeygen, Vdq, Wdq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_X86_SSE, 0);
    return FNIEMOP_CALL_1(iemOpCommonAesNi_FullFullImm8_To_Full,
                          IEM_SELECT_HOST_OR_FALLBACK(fAesNi, iemAImpl_aeskeygenassist_u128, iemAImpl_aeskeygenassist_u128_fallback));
}
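
/*
 * For reference: a minimal C sketch of what AESKEYGENASSIST computes, per the
 * SDM pseudocode: SubWord on source dwords 1 and 3, plus a RotWord (rotate
 * right by 8) and XOR with the zero-extended imm8 RCON.  Illustration only,
 * not the actual iemAImpl_aeskeygenassist_u128 implementation; the sketch
 * names below are hypothetical.
 */
#if 0
static uint8_t const g_abAesSBoxSketch[256] =
{
    0x63, 0x7c, 0x77, 0x7b, 0xf2, 0x6b, 0x6f, 0xc5, 0x30, 0x01, 0x67, 0x2b, 0xfe, 0xd7, 0xab, 0x76,
    0xca, 0x82, 0xc9, 0x7d, 0xfa, 0x59, 0x47, 0xf0, 0xad, 0xd4, 0xa2, 0xaf, 0x9c, 0xa4, 0x72, 0xc0,
    0xb7, 0xfd, 0x93, 0x26, 0x36, 0x3f, 0xf7, 0xcc, 0x34, 0xa5, 0xe5, 0xf1, 0x71, 0xd8, 0x31, 0x15,
    0x04, 0xc7, 0x23, 0xc3, 0x18, 0x96, 0x05, 0x9a, 0x07, 0x12, 0x80, 0xe2, 0xeb, 0x27, 0xb2, 0x75,
    0x09, 0x83, 0x2c, 0x1a, 0x1b, 0x6e, 0x5a, 0xa0, 0x52, 0x3b, 0xd6, 0xb3, 0x29, 0xe3, 0x2f, 0x84,
    0x53, 0xd1, 0x00, 0xed, 0x20, 0xfc, 0xb1, 0x5b, 0x6a, 0xcb, 0xbe, 0x39, 0x4a, 0x4c, 0x58, 0xcf,
    0xd0, 0xef, 0xaa, 0xfb, 0x43, 0x4d, 0x33, 0x85, 0x45, 0xf9, 0x02, 0x7f, 0x50, 0x3c, 0x9f, 0xa8,
    0x51, 0xa3, 0x40, 0x8f, 0x92, 0x9d, 0x38, 0xf5, 0xbc, 0xb6, 0xda, 0x21, 0x10, 0xff, 0xf3, 0xd2,
    0xcd, 0x0c, 0x13, 0xec, 0x5f, 0x97, 0x44, 0x17, 0xc4, 0xa7, 0x7e, 0x3d, 0x64, 0x5d, 0x19, 0x73,
    0x60, 0x81, 0x4f, 0xdc, 0x22, 0x2a, 0x90, 0x88, 0x46, 0xee, 0xb8, 0x14, 0xde, 0x5e, 0x0b, 0xdb,
    0xe0, 0x32, 0x3a, 0x0a, 0x49, 0x06, 0x24, 0x5c, 0xc2, 0xd3, 0xac, 0x62, 0x91, 0x95, 0xe4, 0x79,
    0xe7, 0xc8, 0x37, 0x6d, 0x8d, 0xd5, 0x4e, 0xa9, 0x6c, 0x56, 0xf4, 0xea, 0x65, 0x7a, 0xae, 0x08,
    0xba, 0x78, 0x25, 0x2e, 0x1c, 0xa6, 0xb4, 0xc6, 0xe8, 0xdd, 0x74, 0x1f, 0x4b, 0xbd, 0x8b, 0x8a,
    0x70, 0x3e, 0xb5, 0x66, 0x48, 0x03, 0xf6, 0x0e, 0x61, 0x35, 0x57, 0xb9, 0x86, 0xc1, 0x1d, 0x9e,
    0xe1, 0xf8, 0x98, 0x11, 0x69, 0xd9, 0x8e, 0x94, 0x9b, 0x1e, 0x87, 0xe9, 0xce, 0x55, 0x28, 0xdf,
    0x8c, 0xa1, 0x89, 0x0d, 0xbf, 0xe6, 0x42, 0x68, 0x41, 0x99, 0x2d, 0x0f, 0xb0, 0x54, 0xbb, 0x16
};

DECLINLINE(uint32_t) iemAesSubWordSketch(uint32_t uWord)
{
    return  (uint32_t)g_abAesSBoxSketch[ uWord        & 0xff]
         | ((uint32_t)g_abAesSBoxSketch[(uWord >>  8) & 0xff] <<  8)
         | ((uint32_t)g_abAesSBoxSketch[(uWord >> 16) & 0xff] << 16)
         | ((uint32_t)g_abAesSBoxSketch[(uWord >> 24) & 0xff] << 24);
}

static void iemAesKeyGenAssistSketch(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t bImm)
{
    uint32_t const uRcon  = bImm;                           /* zero extended */
    uint32_t const uSubX1 = iemAesSubWordSketch(puSrc->au32[1]);
    uint32_t const uSubX3 = iemAesSubWordSketch(puSrc->au32[3]);
    puDst->au32[0] = uSubX1;
    puDst->au32[1] = ASMRotateRightU32(uSubX1, 8) ^ uRcon;  /* RotWord = ror 8 */
    puDst->au32[2] = uSubX3;
    puDst->au32[3] = ASMRotateRightU32(uSubX3, 8) ^ uRcon;
}
#endif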


/* Opcode 0xf2 0x0f 0xf0 - invalid (vex only) */


/**
 * Three byte opcode map, first two bytes are 0x0f 0x3a.
 * @sa g_apfnVexMap3
 */
const PFNIEMOP g_apfnThreeByte0f3a[] =
{
    /*          no prefix,                  066h prefix,                f3h prefix,                 f2h prefix */
    /* 0x00 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x01 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x02 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x04 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x05 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x06 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x08 */ iemOp_InvalidNeedRMImm8,    iemOp_roundps_Vx_Wx_Ib,     iemOp_InvalidNeedRMImm8,    iemOp_InvalidNeedRMImm8,
    /* 0x09 */ iemOp_InvalidNeedRMImm8,    iemOp_roundpd_Vx_Wx_Ib,     iemOp_InvalidNeedRMImm8,    iemOp_InvalidNeedRMImm8,
    /* 0x0a */ iemOp_InvalidNeedRMImm8,    iemOp_roundss_Vss_Wss_Ib,   iemOp_InvalidNeedRMImm8,    iemOp_InvalidNeedRMImm8,
    /* 0x0b */ iemOp_InvalidNeedRMImm8,    iemOp_roundsd_Vsd_Wsd_Ib,   iemOp_InvalidNeedRMImm8,    iemOp_InvalidNeedRMImm8,
    /* 0x0c */ iemOp_InvalidNeedRMImm8,    iemOp_blendps_Vx_Wx_Ib,     iemOp_InvalidNeedRMImm8,    iemOp_InvalidNeedRMImm8,
    /* 0x0d */ iemOp_InvalidNeedRMImm8,    iemOp_blendpd_Vx_Wx_Ib,     iemOp_InvalidNeedRMImm8,    iemOp_InvalidNeedRMImm8,
    /* 0x0e */ iemOp_InvalidNeedRMImm8,    iemOp_pblendw_Vx_Wx_Ib,     iemOp_InvalidNeedRMImm8,    iemOp_InvalidNeedRMImm8,
    /* 0x0f */ iemOp_palignr_Pq_Qq_Ib,     iemOp_palignr_Vx_Wx_Ib,     iemOp_InvalidNeedRMImm8,    iemOp_InvalidNeedRMImm8,

    /* 0x10 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x11 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x12 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x13 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x14 */ iemOp_InvalidNeedRMImm8,    iemOp_pextrb_RdMb_Vdq_Ib,   iemOp_InvalidNeedRMImm8,    iemOp_InvalidNeedRMImm8,
    /* 0x15 */ iemOp_InvalidNeedRMImm8,    iemOp_pextrw_RdMw_Vdq_Ib,   iemOp_InvalidNeedRMImm8,    iemOp_InvalidNeedRMImm8,
    /* 0x16 */ iemOp_InvalidNeedRMImm8,    iemOp_pextrd_q_RdMw_Vdq_Ib, iemOp_InvalidNeedRMImm8,    iemOp_InvalidNeedRMImm8,
    /* 0x17 */ iemOp_InvalidNeedRMImm8,    iemOp_extractps_Ed_Vdq_Ib,  iemOp_InvalidNeedRMImm8,    iemOp_InvalidNeedRMImm8,
    /* 0x18 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x19 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x1d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),

    /* 0x20 */ iemOp_InvalidNeedRMImm8,    iemOp_pinsrb_Vdq_RyMb_Ib,   iemOp_InvalidNeedRMImm8,    iemOp_InvalidNeedRMImm8,
    /* 0x21 */ iemOp_InvalidNeedRMImm8,    iemOp_insertps_Vdq_UdqMd_Ib,iemOp_InvalidNeedRMImm8,    iemOp_InvalidNeedRMImm8,
    /* 0x22 */ iemOp_InvalidNeedRMImm8,    iemOp_pinsrd_q_Vdq_Ey_Ib,   iemOp_InvalidNeedRMImm8,    iemOp_InvalidNeedRMImm8,
    /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x28 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x29 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x2a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x2b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x2c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x2d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x2e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x2f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),

    /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x38 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x39 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),

    /* 0x40 */ iemOp_InvalidNeedRMImm8,    iemOp_dpps_Vx_Wx_Ib,        iemOp_InvalidNeedRMImm8,    iemOp_InvalidNeedRMImm8,
    /* 0x41 */ iemOp_InvalidNeedRMImm8,    iemOp_dppd_Vdq_Wdq_Ib,      iemOp_InvalidNeedRMImm8,    iemOp_InvalidNeedRMImm8,
    /* 0x42 */ iemOp_InvalidNeedRMImm8,    iemOp_mpsadbw_Vx_Wx_Ib,     iemOp_InvalidNeedRMImm8,    iemOp_InvalidNeedRMImm8,
    /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x44 */ iemOp_InvalidNeedRMImm8,    iemOp_pclmulqdq_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8,    iemOp_InvalidNeedRMImm8,
    /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x46 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),

    /* 0x50 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x51 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x52 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x53 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x54 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x55 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x56 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x57 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x58 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x59 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x5a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x5b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x5c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x5d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x5e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x5f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),

    /* 0x60 */ iemOp_InvalidNeedRMImm8,    iemOp_pcmpestrm_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8,    iemOp_InvalidNeedRMImm8,
    /* 0x61 */ iemOp_InvalidNeedRMImm8,    iemOp_pcmpestri_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8,    iemOp_InvalidNeedRMImm8,
    /* 0x62 */ iemOp_InvalidNeedRMImm8,    iemOp_pcmpistrm_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8,    iemOp_InvalidNeedRMImm8,
    /* 0x63 */ iemOp_InvalidNeedRMImm8,    iemOp_pcmpistri_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8,    iemOp_InvalidNeedRMImm8,
    /* 0x64 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x65 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x66 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x67 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x68 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x69 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x6a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x6b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x6c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x6d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x6e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x6f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),

    /* 0x70 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x71 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x72 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x73 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x74 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x75 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x76 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x77 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x78 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x79 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x7c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x7d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x7e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x7f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),

    /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x83 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),

    /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRMImm8),

    /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xae */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRMImm8),

    /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRMImm8),

    /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xc2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xc4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xc5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xc6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xc9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xcc */ iemOp_sha1rnds4_Vdq_Wdq_Ib,  iemOp_InvalidNeedRMImm8,   iemOp_InvalidNeedRMImm8,    iemOp_InvalidNeedRMImm8,
    /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRMImm8),

    /* 0xd0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xd1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xd2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xd3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xd4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xd5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xd6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xd7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xd8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xd9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xda */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xdb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xdc */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xdd */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xde */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xdf */ iemOp_InvalidNeedRMImm8,    iemOp_aeskeygen_Vdq_Wdq_Ib, iemOp_InvalidNeedRMImm8,    iemOp_InvalidNeedRMImm8,

    /* 0xe0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xe1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xe2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xe3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xe4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xe5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xe6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xe7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xe8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xe9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xea */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xeb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xec */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xed */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xee */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xef */ IEMOP_X4(iemOp_InvalidNeedRMImm8),

    /* 0xf0 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xf1 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xf2 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xf3 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xf4 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xf5 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xf6 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xf7 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xf8 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xf9 */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xfa */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xfb */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xfc */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xfd */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xfe */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
    /* 0xff */ IEMOP_X4(iemOp_InvalidNeedRMImm8),
};
AssertCompile(RT_ELEMENTS(g_apfnThreeByte0f3a) == 1024);
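
/*
 * A sketch of how this table would be indexed, given the four-columns-per-
 * opcode layout asserted above (none, 066h, 0f3h, 0f2h).  The helper below is
 * hypothetical and for illustration only; it is not how IEM dispatches.
 */
#if 0
DECLINLINE(PFNIEMOP) iemLookupThreeByte0f3aSketch(uint8_t bOpcode, unsigned idxPrefixColumn)
{
    Assert(idxPrefixColumn < 4); /* 0=none, 1=066h, 2=0f3h, 3=0f2h */
    return g_apfnThreeByte0f3a[(size_t)bOpcode * 4 + idxPrefixColumn];
}
#endif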

/** @} */
