VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h@ 49249

Last change on this file since 49249 was 49039, checked in by vboxsync, 11 years ago

IPRT: Filename extension versus suffix cleanup, long overdue.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 586.1 KB
Line 
1/* $Id: IEMAllInstructions.cpp.h 49039 2013-10-10 18:27:32Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2013 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24
/**
 * Common worker for instructions like ADD, AND, OR, ++ with a byte
 * memory/register as the destination.
 *
 * Decodes the ModR/M byte and emits either the register-destination or the
 * memory-destination microcode sequence, calling the byte-sized worker from
 * @a pImpl.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly),
 *                      supplying the normal and lock-prefixed byte workers.
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_r8, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The LOCK prefix is only valid with a memory destination. */
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        /* reg field is the source (REX.R extends it), r/m is the destination (REX.B). */
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        /* A NULL locked worker identifies CMP/TEST, which only read the destination. */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R; /* CMP,TEST */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,  pu8Dst,           0);
        IEM_MC_ARG(uint8_t,    u8Src,            1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* Use the atomic worker when a LOCK prefix is present. */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
85
86
87/**
88 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with
89 * memory/register as the destination.
90 *
91 * @param pImpl Pointer to the instruction implementation (assembly).
92 */
93FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_rv, PCIEMOPBINSIZES, pImpl)
94{
95 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
96
97 /*
98 * If rm is denoting a register, no more instruction bytes.
99 */
100 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
101 {
102 IEMOP_HLP_NO_LOCK_PREFIX();
103
104 switch (pIemCpu->enmEffOpSize)
105 {
106 case IEMMODE_16BIT:
107 IEM_MC_BEGIN(3, 0);
108 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
109 IEM_MC_ARG(uint16_t, u16Src, 1);
110 IEM_MC_ARG(uint32_t *, pEFlags, 2);
111
112 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
113 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
114 IEM_MC_REF_EFLAGS(pEFlags);
115 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
116
117 IEM_MC_ADVANCE_RIP();
118 IEM_MC_END();
119 break;
120
121 case IEMMODE_32BIT:
122 IEM_MC_BEGIN(3, 0);
123 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
124 IEM_MC_ARG(uint32_t, u32Src, 1);
125 IEM_MC_ARG(uint32_t *, pEFlags, 2);
126
127 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
128 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
129 IEM_MC_REF_EFLAGS(pEFlags);
130 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
131
132 if (pImpl != &g_iemAImpl_test)
133 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
134 IEM_MC_ADVANCE_RIP();
135 IEM_MC_END();
136 break;
137
138 case IEMMODE_64BIT:
139 IEM_MC_BEGIN(3, 0);
140 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
141 IEM_MC_ARG(uint64_t, u64Src, 1);
142 IEM_MC_ARG(uint32_t *, pEFlags, 2);
143
144 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
145 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
146 IEM_MC_REF_EFLAGS(pEFlags);
147 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
148
149 IEM_MC_ADVANCE_RIP();
150 IEM_MC_END();
151 break;
152 }
153 }
154 else
155 {
156 /*
157 * We're accessing memory.
158 * Note! We're putting the eflags on the stack here so we can commit them
159 * after the memory.
160 */
161 uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R /* CMP,TEST */;
162 switch (pIemCpu->enmEffOpSize)
163 {
164 case IEMMODE_16BIT:
165 IEM_MC_BEGIN(3, 2);
166 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
167 IEM_MC_ARG(uint16_t, u16Src, 1);
168 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
169 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
170
171 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
172 IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
173 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
174 IEM_MC_FETCH_EFLAGS(EFlags);
175 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
176 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
177 else
178 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
179
180 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
181 IEM_MC_COMMIT_EFLAGS(EFlags);
182 IEM_MC_ADVANCE_RIP();
183 IEM_MC_END();
184 break;
185
186 case IEMMODE_32BIT:
187 IEM_MC_BEGIN(3, 2);
188 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
189 IEM_MC_ARG(uint32_t, u32Src, 1);
190 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
191 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
192
193 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
194 IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
195 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
196 IEM_MC_FETCH_EFLAGS(EFlags);
197 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
198 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
199 else
200 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
201
202 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
203 IEM_MC_COMMIT_EFLAGS(EFlags);
204 IEM_MC_ADVANCE_RIP();
205 IEM_MC_END();
206 break;
207
208 case IEMMODE_64BIT:
209 IEM_MC_BEGIN(3, 2);
210 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
211 IEM_MC_ARG(uint64_t, u64Src, 1);
212 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
213 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
214
215 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
216 IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
217 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
218 IEM_MC_FETCH_EFLAGS(EFlags);
219 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
220 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
221 else
222 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
223
224 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
225 IEM_MC_COMMIT_EFLAGS(EFlags);
226 IEM_MC_ADVANCE_RIP();
227 IEM_MC_END();
228 break;
229 }
230 }
231 return VINF_SUCCESS;
232}
233
234
/**
 * Common worker for byte instructions like ADD, AND, OR, ++ with a register as
 * the destination.
 *
 * With a register destination the memory operand is only read, so no mapping
 * or locked variant is needed.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_r8_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* LOCK is never valid when the destination is a register. */
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        /* r/m is the source here, reg is the destination (mirror of the rm_r8 worker). */
        IEM_MC_FETCH_GREG_U8(u8Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U8(u8Src, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
286
287
288/**
289 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with a
290 * register as the destination.
291 *
292 * @param pImpl Pointer to the instruction implementation (assembly).
293 */
294FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
295{
296 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
297 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
298
299 /*
300 * If rm is denoting a register, no more instruction bytes.
301 */
302 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
303 {
304 switch (pIemCpu->enmEffOpSize)
305 {
306 case IEMMODE_16BIT:
307 IEM_MC_BEGIN(3, 0);
308 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
309 IEM_MC_ARG(uint16_t, u16Src, 1);
310 IEM_MC_ARG(uint32_t *, pEFlags, 2);
311
312 IEM_MC_FETCH_GREG_U16(u16Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
313 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
314 IEM_MC_REF_EFLAGS(pEFlags);
315 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
316
317 IEM_MC_ADVANCE_RIP();
318 IEM_MC_END();
319 break;
320
321 case IEMMODE_32BIT:
322 IEM_MC_BEGIN(3, 0);
323 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
324 IEM_MC_ARG(uint32_t, u32Src, 1);
325 IEM_MC_ARG(uint32_t *, pEFlags, 2);
326
327 IEM_MC_FETCH_GREG_U32(u32Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
328 IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
329 IEM_MC_REF_EFLAGS(pEFlags);
330 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
331
332 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
333 IEM_MC_ADVANCE_RIP();
334 IEM_MC_END();
335 break;
336
337 case IEMMODE_64BIT:
338 IEM_MC_BEGIN(3, 0);
339 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
340 IEM_MC_ARG(uint64_t, u64Src, 1);
341 IEM_MC_ARG(uint32_t *, pEFlags, 2);
342
343 IEM_MC_FETCH_GREG_U64(u64Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
344 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
345 IEM_MC_REF_EFLAGS(pEFlags);
346 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
347
348 IEM_MC_ADVANCE_RIP();
349 IEM_MC_END();
350 break;
351 }
352 }
353 else
354 {
355 /*
356 * We're accessing memory.
357 */
358 switch (pIemCpu->enmEffOpSize)
359 {
360 case IEMMODE_16BIT:
361 IEM_MC_BEGIN(3, 1);
362 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
363 IEM_MC_ARG(uint16_t, u16Src, 1);
364 IEM_MC_ARG(uint32_t *, pEFlags, 2);
365 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
366
367 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
368 IEM_MC_FETCH_MEM_U16(u16Src, pIemCpu->iEffSeg, GCPtrEffDst);
369 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
370 IEM_MC_REF_EFLAGS(pEFlags);
371 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
372
373 IEM_MC_ADVANCE_RIP();
374 IEM_MC_END();
375 break;
376
377 case IEMMODE_32BIT:
378 IEM_MC_BEGIN(3, 1);
379 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
380 IEM_MC_ARG(uint32_t, u32Src, 1);
381 IEM_MC_ARG(uint32_t *, pEFlags, 2);
382 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
383
384 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
385 IEM_MC_FETCH_MEM_U32(u32Src, pIemCpu->iEffSeg, GCPtrEffDst);
386 IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
387 IEM_MC_REF_EFLAGS(pEFlags);
388 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
389
390 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
391 IEM_MC_ADVANCE_RIP();
392 IEM_MC_END();
393 break;
394
395 case IEMMODE_64BIT:
396 IEM_MC_BEGIN(3, 1);
397 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
398 IEM_MC_ARG(uint64_t, u64Src, 1);
399 IEM_MC_ARG(uint32_t *, pEFlags, 2);
400 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
401
402 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
403 IEM_MC_FETCH_MEM_U64(u64Src, pIemCpu->iEffSeg, GCPtrEffDst);
404 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
405 IEM_MC_REF_EFLAGS(pEFlags);
406 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
407
408 IEM_MC_ADVANCE_RIP();
409 IEM_MC_END();
410 break;
411 }
412 }
413 return VINF_SUCCESS;
414}
415
416
/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on AL with
 * a byte immediate.
 *
 * No ModR/M byte: the destination is implicitly AL and the source is the
 * immediate byte following the opcode.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_AL_Ib, PCIEMOPBINSIZES, pImpl)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
    IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/ u8Imm,  1);
    IEM_MC_ARG(uint32_t *,      pEFlags,            2);

    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX);
    IEM_MC_REF_EFLAGS(pEFlags);
    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
441
442
/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on
 * AX/EAX/RAX with a word/dword immediate.
 *
 * The destination is implicitly the accumulator; the immediate is fetched
 * according to the effective operand size (in 64-bit mode a dword immediate
 * is sign-extended to 64 bits, per the usual Iz encoding rules).
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rAX_Iz, PCIEMOPBINSIZES, pImpl)
{
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
            IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
            IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

            /* TEST doesn't write its destination, so it must not zero the
               upper dword of RAX either. */
            if (pImpl != &g_iemAImpl_test)
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* Iz in 64-bit mode: dword immediate sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
            IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
515
516
/** Opcodes 0xf1, 0xd6. */
FNIEMOP_DEF(iemOp_Invalid)
{
    /* Shared handler for opcodes decoded as invalid: raises \#UD. */
    IEMOP_MNEMONIC("Invalid");
    return IEMOP_RAISE_INVALID_OPCODE();
}
523
524
525
526/** @name ..... opcodes.
527 *
528 * @{
529 */
530
531/** @} */
532
533
534/** @name Two byte opcodes (first byte 0x0f).
535 *
536 * @{
537 */
538
/** Opcode 0x0f 0x00 /0. */
FNIEMOP_DEF_1(iemOp_Grp6_sldt, uint8_t, bRm)
{
    /* Store LDTR. Register destinations follow the effective operand size;
       memory destinations always store 16 bits. */
    IEMOP_MNEMONIC("sldt Rv/Mw");
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: always a 16-bit store regardless of operand size. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
594
595
/** Opcode 0x0f 0x00 /1. */
FNIEMOP_DEF_1(iemOp_Grp6_str, uint8_t, bRm)
{
    /* Store TR. Structured exactly like sldt above: register destinations
       follow the effective operand size, memory destinations store 16 bits. */
    IEMOP_MNEMONIC("str Rv/Mw");
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: always a 16-bit store regardless of operand size. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
651
652
/** Opcode 0x0f 0x00 /2. */
FNIEMOP_DEF_1(iemOp_Grp6_lldt, uint8_t, bRm)
{
    /* Load LDTR from a 16-bit selector; the heavy lifting (privilege and
       descriptor checks) is done by the C implementation iemCImpl_lldt. */
    IEMOP_MNEMONIC("lldt Ew");
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
682
683
/** Opcode 0x0f 0x00 /3. */
FNIEMOP_DEF_1(iemOp_Grp6_ltr, uint8_t, bRm)
{
    /* Load TR from a 16-bit selector; privilege and descriptor checks are
       done by the C implementation iemCImpl_ltr. */
    IEMOP_MNEMONIC("ltr Ew");
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
713
714
715/** Opcode 0x0f 0x00 /3. */
716FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
717{
718 IEMOP_HLP_NO_REAL_OR_V86_MODE();
719
720 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
721 {
722 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
723 IEM_MC_BEGIN(2, 0);
724 IEM_MC_ARG(uint16_t, u16Sel, 0);
725 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
726 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
727 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
728 IEM_MC_END();
729 }
730 else
731 {
732 IEM_MC_BEGIN(2, 1);
733 IEM_MC_ARG(uint16_t, u16Sel, 0);
734 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
735 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
736 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
737 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
738 IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
739 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
740 IEM_MC_END();
741 }
742 return VINF_SUCCESS;
743}
744
745
/** Opcode 0x0f 0x00 /4. */
FNIEMOP_DEF_1(iemOp_Grp6_verr, uint8_t, bRm)
{
    /* VERR: verify segment for reading; delegates to the common VerX worker. */
    IEMOP_MNEMONIC("verr Ew");
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}
752
753
754/** Opcode 0x0f 0x00 /5. */
755FNIEMOP_DEF_1(iemOp_Grp6_verw, uint8_t, bRm)
756{
757 IEMOP_MNEMONIC("verr Ew");
758 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
759}
760
761
762/** Opcode 0x0f 0x00. */
763FNIEMOP_DEF(iemOp_Grp6)
764{
765 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
766 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
767 {
768 case 0: return FNIEMOP_CALL_1(iemOp_Grp6_sldt, bRm);
769 case 1: return FNIEMOP_CALL_1(iemOp_Grp6_str, bRm);
770 case 2: return FNIEMOP_CALL_1(iemOp_Grp6_lldt, bRm);
771 case 3: return FNIEMOP_CALL_1(iemOp_Grp6_ltr, bRm);
772 case 4: return FNIEMOP_CALL_1(iemOp_Grp6_verr, bRm);
773 case 5: return FNIEMOP_CALL_1(iemOp_Grp6_verw, bRm);
774 case 6: return IEMOP_RAISE_INVALID_OPCODE();
775 case 7: return IEMOP_RAISE_INVALID_OPCODE();
776 IEM_NOT_REACHED_DEFAULT_CASE_RET();
777 }
778
779}
780
781
/** Opcode 0x0f 0x01 /0. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    /* Store GDTR to memory; the store itself is done by iemCImpl_sgdt. */
    IEMOP_MNEMONIC("sgdt Ms");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/pIemCpu->iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CALL_CIMPL_3(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
797
798
/** Opcode 0x0f 0x01 /0 (mod=3, rm=1 — VMCALL, encoding 0x0f 0x01 0xc1). */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    /* VT-x instruction, not implemented — decoded as invalid opcode. */
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
805
806
/** Opcode 0x0f 0x01 /0 (mod=3, rm=2 — VMLAUNCH, encoding 0x0f 0x01 0xc2). */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    /* VT-x instruction, not implemented — decoded as invalid opcode. */
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
813
814
/** Opcode 0x0f 0x01 /0 (mod=3, rm=3 — VMRESUME, encoding 0x0f 0x01 0xc3). */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    /* VT-x instruction, not implemented — decoded as invalid opcode. */
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
821
822
/** Opcode 0x0f 0x01 /0 (mod=3, rm=4 — VMXOFF, encoding 0x0f 0x01 0xc4). */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    /* VT-x instruction, not implemented — decoded as invalid opcode. */
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
829
830
/** Opcode 0x0f 0x01 /1. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    /* Store IDTR to memory; the store itself is done by iemCImpl_sidt. */
    IEMOP_MNEMONIC("sidt Ms");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/pIemCpu->iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CALL_CIMPL_3(iemCImpl_sidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
846
847
/** Opcode 0x0f 0x01 /1 (mod=3, rm=0 — MONITOR). */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    /* Fully deferred to the C implementation; only the effective segment of
       the implicit RAX/EAX address operand is passed along. */
    IEMOP_MNEMONIC("monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pIemCpu->iEffSeg);
}
855
856
/** Opcode 0x0f 0x01 /1 (mod=3, rm=1 — MWAIT). */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    /* Fully deferred to the C implementation; takes no explicit operands. */
    IEMOP_MNEMONIC("mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}
864
865
/** Opcode 0x0f 0x01 /2. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    /* Load GDTR from memory; checks and the actual load are done by
       iemCImpl_lgdt. */
    IEMOP_MNEMONIC("lgdt");
    IEMOP_HLP_NO_LOCK_PREFIX();

    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/pIemCpu->iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
882
883
/** Opcode 0x0f 0x01 /2 (mod=3, rm=0 — XGETBV). */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    /* Not implemented; asserts in debug builds, raises \#UD otherwise. */
    AssertFailed();
    return IEMOP_RAISE_INVALID_OPCODE();
}
890
891
/** Opcode 0x0f 0x01 /2 (mod=3, rm=1 — XSETBV). */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    /* Not implemented; asserts in debug builds, raises \#UD otherwise. */
    AssertFailed();
    return IEMOP_RAISE_INVALID_OPCODE();
}
898
899
900/** Opcode 0x0f 0x01 /3. */
901FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
902{
903 IEMOP_HLP_NO_LOCK_PREFIX();
904
905 IEMMODE enmEffOpSize = pIemCpu->enmCpuMode == IEMMODE_64BIT
906 ? IEMMODE_64BIT
907 : pIemCpu->enmEffOpSize;
908 IEM_MC_BEGIN(3, 1);
909 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/pIemCpu->iEffSeg, 0);
910 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
911 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
912 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
913 IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
914 IEM_MC_END();
915 return VINF_SUCCESS;
916}
917
918
/*
 * AMD-V (SVM) instructions, encodings 0x0f 0x01 0xd8..0xdf (the mod=3 forms
 * of group 7 /3).  None are implemented yet; FNIEMOP_UD_STUB presumably makes
 * each decode as an undefined opcode — see the macro definition to confirm.
 */

/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
942
/** Opcode 0x0f 0x01 /4. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    /* Store the machine status word (low word of CR0).  Register
       destinations follow the effective operand size; memory destinations
       are always 16-bit. */
    IEMOP_MNEMONIC("smsw");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_CR0_U16(u16Tmp);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_CR0_U32(u32Tmp);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_CR0_U64(u64Tmp);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Ignore operand size here, memory refs are always 16-bit. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tmp);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_CR0_U16(u16Tmp);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
}
996
997
/** Opcode 0x0f 0x01 /6. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3-bits are used. */
    IEMOP_MNEMONIC("lmsw");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1025
1026
/** Opcode 0x0f 0x01 /7 (memory form). */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC("invlpg");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Decode the effective address and hand it to the C implementation,
       which does the actual TLB invalidation. */
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1039
1040
/** Opcode 0x0f 0x01 /7, register form with r/m=0 (swapgs). */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC("swapgs");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_ONLY_64BIT();  /* swapgs only exists in 64-bit mode. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}
1049
1050
/** Opcode 0x0f 0x01 /7, register form with r/m=1 (rdtscp). */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    /* Not implemented yet; the caller falls back to another emulation engine. */
    NOREF(pIemCpu);
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
1058
1059
/** Opcode 0x0f 0x01. */
FNIEMOP_DEF(iemOp_Grp7)
{
    /*
     * Group 7 dispatcher.  Decodes the ModR/M byte and forwards to the
     * per-instruction decoders: the memory forms are the descriptor table
     * and MSW instructions, while mod=3 encodes specials (VMX, monitor,
     * xgetbv/xsetbv, SVM, swapgs/rdtscp) via the r/m field.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            /* Memory: sgdt.  Register: Intel VMX instructions. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            /* Memory: sidt.  Register: monitor/mwait. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            /* Memory: lgdt.  Register: xgetbv/xsetbv. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            /* Memory: lidt.  Register: AMD SVM instructions (all 8 r/m values used). */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            /* /5 is not assigned. */
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            /* Memory: invlpg.  Register: swapgs/rdtscp. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_invlpg, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1136
/**
 * Common worker for LAR (0x0f 0x02) and LSL (0x0f 0x03).
 *
 * Fetches the 16-bit selector operand (register or memory) and defers the
 * descriptor lookup and flag updates to iemCImpl_LarLsl_u16/u64.
 *
 * @param   fIsLar  true for LAR, false for LSL.
 */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();  /* Only valid in protected mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source. */
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *,  pu16Dst,           0);
                IEM_MC_ARG(uint16_t,    u16Sel,            1);
                IEM_MC_ARG(uint32_t *,  pEFlags,           2);
                IEM_MC_ARG_CONST(bool,  fIsLarArg, fIsLar, 3);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            /* 32-bit and 64-bit share the 64-bit worker. */
            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *,  pu64Dst,           0);
                IEM_MC_ARG(uint16_t,    u16Sel,            1);
                IEM_MC_ARG(uint32_t *,  pEFlags,           2);
                IEM_MC_ARG_CONST(bool,  fIsLarArg, fIsLar, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory source: a 16-bit selector read at the effective address. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint16_t *,  pu16Dst,           0);
                IEM_MC_ARG(uint16_t,    u16Sel,            1);
                IEM_MC_ARG(uint32_t *,  pEFlags,           2);
                IEM_MC_ARG_CONST(bool,  fIsLarArg, fIsLar, 3);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint64_t *,  pu64Dst,           0);
                IEM_MC_ARG(uint16_t,    u16Sel,            1);
                IEM_MC_ARG(uint32_t *,  pEFlags,           2);
                IEM_MC_ARG_CONST(bool,  fIsLarArg, fIsLar, 3);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1238
1239
1240
/** Opcode 0x0f 0x02 - lar Gv,Ew (load access rights). */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC("lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true /*fIsLar*/);
}
1247
1248
/** Opcode 0x0f 0x03 - lsl Gv,Ew (load segment limit). */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC("lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false /*fIsLar*/);
}
1255
1256
/** Opcode 0x0f 0x05 (syscall; the old comment said 0x04, which is unassigned). */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC("syscall");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}
1264
1265
/** Opcode 0x0f 0x06 (clts; the old comment said 0x05, which is syscall). */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC("clts");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}
1273
1274
/** Opcode 0x0f 0x07 (sysret; the old comment said 0x06, which is clts). */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC("sysret");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}
1282
1283
/** Opcode 0x0f 0x08. */
FNIEMOP_STUB(iemOp_invd);


/** Opcode 0x0f 0x09. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    /* Privilege-checked no-op: raises \#GP(0) outside ring-0, otherwise the
       actual cache write-back/invalidate is not emulated. */
    IEMOP_MNEMONIC("wbinvd");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}


/** Opcode 0x0f 0x0b. */
FNIEMOP_STUB(iemOp_ud2);
1303
1304/** Opcode 0x0f 0x0d. */
1305FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
1306{
1307 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
1308 if (!IEM_IS_AMD_CPUID_FEATURES_ANY_PRESENT(X86_CPUID_EXT_FEATURE_EDX_LONG_MODE | X86_CPUID_AMD_FEATURE_EDX_3DNOW,
1309 X86_CPUID_AMD_FEATURE_ECX_3DNOWPRF))
1310 {
1311 IEMOP_MNEMONIC("GrpP");
1312 return IEMOP_RAISE_INVALID_OPCODE();
1313 }
1314
1315 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1316 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1317 {
1318 IEMOP_MNEMONIC("GrpP");
1319 return IEMOP_RAISE_INVALID_OPCODE();
1320 }
1321
1322 IEMOP_HLP_NO_LOCK_PREFIX();
1323 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
1324 {
1325 case 2: /* Aliased to /0 for the time being. */
1326 case 4: /* Aliased to /0 for the time being. */
1327 case 5: /* Aliased to /0 for the time being. */
1328 case 6: /* Aliased to /0 for the time being. */
1329 case 7: /* Aliased to /0 for the time being. */
1330 case 0: IEMOP_MNEMONIC("prefetch"); break;
1331 case 1: IEMOP_MNEMONIC("prefetchw "); break;
1332 case 3: IEMOP_MNEMONIC("prefetchw"); break;
1333 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1334 }
1335
1336 IEM_MC_BEGIN(0, 1);
1337 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1338 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1339 /* Currently a NOP. */
1340 IEM_MC_ADVANCE_RIP();
1341 IEM_MC_END();
1342 return VINF_SUCCESS;
1343}
1344
1345
/** Opcode 0x0f 0x0e. */
FNIEMOP_STUB(iemOp_femms);


/*
 * 3DNow! instruction stubs.  The third opcode byte (the "suffix") selects the
 * operation; see the iemOp_3Dnow dispatcher below.  None are implemented yet.
 */

/** Opcode 0x0f 0x0f 0x0c. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x0d. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1c. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1d. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8a. */
FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8e. */
FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x90. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);

/** Opcode 0x0f 0x0f 0x94. */
FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);

/** Opcode 0x0f 0x0f 0x96. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);

/** Opcode 0x0f 0x0f 0x97. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9a. */
FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9e. */
FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);

/** Opcode 0x0f 0x0f 0xa0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa7. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xaa. */
FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);

/** Opcode 0x0f 0x0f 0xae. */
FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);

/** Opcode 0x0f 0x0f 0xb0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb7. */
FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbb. */
FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbf. */
FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);
1421
1422
/** Opcode 0x0f 0x0f - 3DNow! escape.
 *  The operation is selected by a third opcode byte following the ModR/M
 *  and displacement bytes; raises \#UD if the CPU profile lacks 3DNow!. */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_IS_AMD_CPUID_FEATURE_PRESENT_EDX(X86_CPUID_AMD_FEATURE_EDX_3DNOW))
    {
        IEMOP_MNEMONIC("3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    switch (b)
    {
        case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
        case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
        case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
        case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
        case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
        case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
        case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
        case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
        case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
        case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
        case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
        case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
        case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
        case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
        case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
        case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
        case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
        case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
        case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
        case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
        case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
        case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
        case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
        case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
1464
1465
/* SSE/SSE2 move instruction stubs, 0x0f 0x10..0x17 (the decoder variant is
   selected by the 0x66/0xf3/0xf2 prefixes).  //NEXT marks planned work. */

/** Opcode 0x0f 0x10. */
FNIEMOP_STUB(iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd);
/** Opcode 0x0f 0x11. */
FNIEMOP_STUB(iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd);
/** Opcode 0x0f 0x12. */
FNIEMOP_STUB(iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq); //NEXT
/** Opcode 0x0f 0x13. */
FNIEMOP_STUB(iemOp_movlps_Mq_Vq__movlpd_Mq_Vq); //NEXT
/** Opcode 0x0f 0x14. */
FNIEMOP_STUB(iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq);
/** Opcode 0x0f 0x15. */
FNIEMOP_STUB(iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq);
/** Opcode 0x0f 0x16. */
FNIEMOP_STUB(iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq); //NEXT
/** Opcode 0x0f 0x17. */
FNIEMOP_STUB(iemOp_movhps_Mq_Vq__movhpd_Mq_Vq); //NEXT
1482
1483
/** Opcode 0x0f 0x18 - Group 16 (SSE prefetch hints).
 *  Memory forms decode the address and act as NOPs; register forms \#UD. */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC("prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC("prefetchT0  m8"); break;
            case 2: IEMOP_MNEMONIC("prefetchT1  m8"); break;
            case 3: IEMOP_MNEMONIC("prefetchT2  m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        /* Decode the effective address, then do nothing - prefetching is a hint. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        /* Currently a NOP. */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    return IEMOP_RAISE_INVALID_OPCODE();
}
1515
1516
/** Opcode 0x0f 0x19..0x1f - multi-byte NOP with a ModR/M operand.
 *  The operand is decoded (memory forms consume the address bytes) but never
 *  accessed. */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        /* Currently a NOP. */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1539
1540
/** Opcode 0x0f 0x20 - mov Rd,Cd (read control register). */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC("mov Rd,Cd");
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_64BIT;
    else
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_IS_AMD_CPUID_FEATURE_PRESENT_ECX(X86_CPUID_AMD_FEATURE_ECX_CR8L))
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2, CR3, CR4 and CR8 are valid sources; the rest #UD. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB, iCrReg);
}
1571
1572
/** Opcode 0x0f 0x21 - mov Rd,Dd (read debug register).
 *  REX.R is invalid here (there are only 8 debug registers).
 *  NOTE(review): uses IEMOP_HLP_NO_LOCK_PREFIX while mov Dd,Rd below uses the
 *  DONE_DECODING variant - looks inconsistent, confirm which is intended. */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    IEMOP_MNEMONIC("mov Rd,Dd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
}
1585
1586
/** Opcode 0x0f 0x22 - mov Cd,Rd (write control register). */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC("mov Cd,Rd");
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_64BIT;
    else
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_IS_AMD_CPUID_FEATURE_PRESENT_ECX(X86_CPUID_AMD_FEATURE_ECX_CR8L))
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2, CR3, CR4 and CR8 are valid destinations; the rest #UD. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB);
}
1617
1618
/** Opcode 0x0f 0x23 - mov Dd,Rd (write debug register).
 *  REX.R is invalid here (there are only 8 debug registers). */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    IEMOP_MNEMONIC("mov Dd,Rd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
                                   (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB);
}
1631
1632
/** Opcode 0x0f 0x24 - mov Rd,Td (legacy test registers, gone since 486). */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    IEMOP_MNEMONIC("mov Rd,Td");
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x26 - mov Td,Rd (legacy test registers, gone since 486). */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    IEMOP_MNEMONIC("mov Td,Rd");
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1649
1650
/* SSE/SSE2 move/convert/compare stubs, 0x0f 0x28..0x2f.  //NEXT marks planned work. */

/** Opcode 0x0f 0x28. */
FNIEMOP_STUB(iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd);
/** Opcode 0x0f 0x29. */
FNIEMOP_STUB(iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd);
/** Opcode 0x0f 0x2a. */
FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey); //NEXT
/** Opcode 0x0f 0x2b. */
FNIEMOP_STUB(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd); //NEXT:XP
/** Opcode 0x0f 0x2c. */
FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd); //NEXT
/** Opcode 0x0f 0x2d. */
FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd);
/** Opcode 0x0f 0x2e. */
FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd); //NEXT
/** Opcode 0x0f 0x2f. */
FNIEMOP_STUB(iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd);
1667
1668
/** Opcode 0x0f 0x30 - wrmsr. */
FNIEMOP_DEF(iemOp_wrmsr)
{
    IEMOP_MNEMONIC("wrmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
}
1676
1677
/** Opcode 0x0f 0x31 - rdtsc. */
FNIEMOP_DEF(iemOp_rdtsc)
{
    IEMOP_MNEMONIC("rdtsc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
}
1685
1686
/** Opcode 0x0f 0x32 (rdmsr; the old comment said 0x33, which is rdpmc). */
FNIEMOP_DEF(iemOp_rdmsr)
{
    IEMOP_MNEMONIC("rdmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
}
1694
1695
/** Opcode 0x0f 0x33 (rdpmc; the old comment said 0x34, which is sysenter). */
FNIEMOP_STUB(iemOp_rdpmc);
/** Opcode 0x0f 0x34. */
FNIEMOP_STUB(iemOp_sysenter);
/** Opcode 0x0f 0x35. */
FNIEMOP_STUB(iemOp_sysexit);
/** Opcode 0x0f 0x37. */
FNIEMOP_STUB(iemOp_getsec);
/** Opcode 0x0f 0x38. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
/** Opcode 0x0f 0x3a. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
/** Opcode 0x0f 0x3c (?). */
FNIEMOP_STUB(iemOp_movnti_Gv_Ev);
1710
/**
 * Implements a conditional move (CMOVcc Gv,Ev).
 *
 * The source (register or memory) is always fetched; only the store into the
 * destination register is conditional.  In the 32-bit register-destination
 * case the high half of the 64-bit register is explicitly cleared on the
 * "not taken" path as well, matching the usual 32-bit write behavior.
 *
 * Wish there was an obvious way to do this where we could share and reduce
 * code bloat.
 *
 * @param a_Cnd The conditional "microcode" operation.
 */
#define CMOV_X(a_Cnd) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
    { \
        switch (pIemCpu->enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
 \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
 \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 1); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                a_Cnd { \
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
 \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        switch (pIemCpu->enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint16_t, u16Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
 \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint32_t, u32Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp); \
                } IEM_MC_ELSE() { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
 \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(0, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
                IEM_MC_LOCAL(uint64_t, u64Tmp); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
                a_Cnd { \
                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp); \
                } IEM_MC_ENDIF(); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
                return VINF_SUCCESS; \
 \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } do {} while (0)
1811
1812
1813
/** Opcode 0x0f 0x40 - cmovo: move if OF=1. */
FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovo Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x41 - cmovno: move if OF=0. */
FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovno Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x42 - cmovc/cmovb: move if CF=1. */
FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x43 - cmovnc/cmovae: move if CF=0. */
FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x44 - cmove/cmovz: move if ZF=1. */
FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
{
    IEMOP_MNEMONIC("cmove Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x45 - cmovne/cmovnz: move if ZF=0. */
FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovne Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x46 - cmovbe: move if CF=1 or ZF=1. */
FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x47 - cmovnbe/cmova: move if CF=0 and ZF=0. */
FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x48 - cmovs: move if SF=1. */
FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovs Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x49 - cmovns: move if SF=0. */
FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovns Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x4a - cmovp: move if PF=1. */
FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4b - cmovnp: move if PF=0. */
FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4c - cmovl: move if SF != OF. */
FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4d - cmovnl/cmovge: move if SF == OF. */
FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4e - cmovle: move if ZF=1 or SF != OF. */
FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4f - cmovnle/cmovg: move if ZF=0 and SF == OF. */
FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}

#undef CMOV_X
1942
/* SSE/SSE2 arithmetic stubs, 0x0f 0x50..0x5f.  //NEXT marks planned work. */

/** Opcode 0x0f 0x50. */
FNIEMOP_STUB(iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd);
/** Opcode 0x0f 0x51. */
FNIEMOP_STUB(iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd);
/** Opcode 0x0f 0x52. */
FNIEMOP_STUB(iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss);
/** Opcode 0x0f 0x53. */
FNIEMOP_STUB(iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss);
/** Opcode 0x0f 0x54. */
FNIEMOP_STUB(iemOp_andps_Vps_Wps__andpd_Wpd_Vpd);
/** Opcode 0x0f 0x55. */
FNIEMOP_STUB(iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd);
/** Opcode 0x0f 0x56. */
FNIEMOP_STUB(iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd);
/** Opcode 0x0f 0x57. */
FNIEMOP_STUB(iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd);
/** Opcode 0x0f 0x58. */
FNIEMOP_STUB(iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd); //NEXT
/** Opcode 0x0f 0x59. */
FNIEMOP_STUB(iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd);//NEXT
/** Opcode 0x0f 0x5a. */
FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd);
/** Opcode 0x0f 0x5b. */
FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps);
/** Opcode 0x0f 0x5c. */
FNIEMOP_STUB(iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd);
/** Opcode 0x0f 0x5d. */
FNIEMOP_STUB(iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd);
/** Opcode 0x0f 0x5e. */
FNIEMOP_STUB(iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd);
/** Opcode 0x0f 0x5f. */
FNIEMOP_STUB(iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd);
1975
1976
/**
 * Common worker for SSE2 and MMX instructions on the forms:
 *      pxxxx xmm1, xmm2/mem128
 *      pxxxx mm1, mm2/mem32
 *
 * The 2nd operand is the first half of a register, which in the memory case
 * means a 32-bit memory access for MMX, and a 128-bit aligned 64-bit or
 * 128-bit memory access for SSE (note: the original comment said "MMX" here,
 * which contradicted the first clause).
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The operand-size prefix selects the SSE form; no prefix is MMX;
       repz/repnz forms are invalid. */
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint128_t *,          pDst, 0);
                IEM_MC_ARG(uint64_t const *,     pSrc, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint128_t *,                 pDst,       0);
                IEM_MC_LOCAL(uint64_t,                  uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *,  pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            if (!pImpl->pfnU64)
                return IEMOP_RAISE_INVALID_OPCODE();
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint64_t *,          pDst, 0);
                IEM_MC_ARG(uint32_t const *,    pSrc, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *,                  pDst,       0);
                IEM_MC_LOCAL(uint32_t,                  uSrc);
                IEM_MC_ARG_LOCAL_REF(uint32_t const *,  pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U32(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2083
2084
/** Opcode 0x0f 0x60. */
FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpcklbw");
    /* Low-half byte interleave; the common worker handles both MMX and SSE forms. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
}
2091
2092
/** Opcode 0x0f 0x61. */
FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpcklwd"); /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
    /* Low-half word interleave; the common worker handles both MMX and SSE forms. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
}
2099
2100
/** Opcode 0x0f 0x62. */
FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckldq");
    /* Low-half dword interleave; the common worker handles both MMX and SSE forms. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
}
2107
2108
/* Pack and compare instructions 0x0f 0x63..0x67 — not yet implemented. */
/** Opcode 0x0f 0x63. */
FNIEMOP_STUB(iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq);
/** Opcode 0x0f 0x64. */
FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq);
/** Opcode 0x0f 0x65. */
FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq);
/** Opcode 0x0f 0x66. */
FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq);
/** Opcode 0x0f 0x67. */
FNIEMOP_STUB(iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq);
2119
2120
/**
 * Common worker for SSE2 and MMX instructions on the forms:
 *      pxxxx xmm1, xmm2/mem128
 *      pxxxx mm1, mm2/mem64
 *
 * The 2nd operand is the second half of a register, which in the memory case
 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
 * where it may read the full 128 bits or only the upper 64 bits.
 *
 * Exceptions type 4.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The mandatory prefix selects the instruction form: 0x66 = SSE, none = MMX. */
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint128_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */

                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            if (!pImpl->pfnU64) /* SSE2-only forms (e.g. punpckhqdq) provide no MMX worker. */
                return IEMOP_RAISE_INVALID_OPCODE();
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default: /* Any other mandatory prefix combination is undefined. */
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2227
2228
/** Opcode 0x0f 0x68. */
FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhbw");
    /* High-half byte interleave; the common worker handles both MMX and SSE forms. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
}
2235
2236
/** Opcode 0x0f 0x69. */
FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhwd");
    /* High-half word interleave; the common worker handles both MMX and SSE forms. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
}
2243
2244
/** Opcode 0x0f 0x6a. */
FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhdq");
    /* High-half dword interleave; the common worker handles both MMX and SSE forms. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
}
2251
/** Opcode 0x0f 0x6b. */
FNIEMOP_STUB(iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq); /* not yet implemented; NOTE(review): identifier says 'packssdq', mnemonic is packssdw */
2254
2255
/** Opcode 0x0f 0x6c. */
FNIEMOP_DEF(iemOp_punpcklqdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpcklqdq");
    /* SSE2 only — the worker raises #UD for the prefix-less (MMX) encoding. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
}
2262
2263
/** Opcode 0x0f 0x6d. */
FNIEMOP_DEF(iemOp_punpckhqdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhqdq");
    /* SSE2 only — the worker raises #UD for the prefix-less (MMX) encoding. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
}
2270
2271
/** Opcode 0x0f 0x6e. */
FNIEMOP_DEF(iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The mandatory prefix selects the destination: 0x66 = XMM (SSE), none = MMX. */
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            IEMOP_MNEMONIC("movd/q Wd/q,Ed/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* XMM, greg */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                /* REX.W selects movq (64-bit) over movd (32-bit); either way the rest of the XMM register is zeroed. */
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                    IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                    IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* XMM, [mem] */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                /* NOTE(review): last argument is 1 here vs 0 in the ALU workers — confirm the intended meaning. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq/d Pd/q,Ed/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* MMX, greg */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                /* movd zero-extends the 32-bit source into the 64-bit MMX register. */
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                else
                    IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* MMX, [mem] */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default: /* Any other mandatory prefix combination is undefined. */
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2372
2373
/** Opcode 0x0f 0x6f. */
FNIEMOP_DEF(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq)
{
    bool fAligned = false;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Prefix selects the form: 0x66 = movdqa (aligned), F3 = movdqu (unaligned), none = MMX movq. */
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
            fAligned = true;
            /* fall thru - aligned and unaligned forms share the code below. */
        case IEM_OP_PRF_REPZ: /* SSE unaligned */
            if (fAligned)
                IEMOP_MNEMONIC("movdqa Vdq,Wdq");
            else
                IEMOP_MNEMONIC("movdqu Vdq,Wdq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_XREG_U128(u128Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u128Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                /* Only movdqa enforces 16-byte alignment (#GP on misalignment). */
                if (fAligned)
                    IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                else
                    IEM_MC_FETCH_MEM_U128(u128Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u128Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq Pq,Qq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default: /* F2 (repnz) is undefined for 0x0f 0x6f. */
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2467
2468
/** Opcode 0x0f 0x70. The immediate here is evil! */
FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* All three SSE prefixes (0x66/F2/F3) are valid shuffle variants; no prefix is pshufw (MMX ext). */
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
        case IEM_OP_PRF_REPNZ: /* SSE */
        case IEM_OP_PRF_REPZ: /* SSE */
        {
            /* Re-dispatch on the same mask to pick the concrete shuffle worker. */
            PFNIEMAIMPLMEDIAPSHUF pfnAImpl;
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
            {
                case IEM_OP_PRF_SIZE_OP:
                    IEMOP_MNEMONIC("pshufd Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshufd;
                    break;
                case IEM_OP_PRF_REPNZ:
                    IEMOP_MNEMONIC("pshuflw Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshuflw;
                    break;
                case IEM_OP_PRF_REPZ:
                    IEMOP_MNEMONIC("pshufhw Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshufhw;
                    break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* supplies the default: label */
            }
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint128_t const *, pSrc, 1);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                /* The immediate follows the ModR/M + displacement, so it must be fetched after the EA calc. */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();

                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case 0: /* MMX Extension */
            IEMOP_MNEMONIC("pshufw Pq,Qq,Ib");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                /* pshufw requires SSE or the AMD MMX extensions, not plain MMX. */
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                /* The immediate follows the ModR/M + displacement, so it must be fetched after the EA calc. */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();

                IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default: /* Multiple mandatory prefixes at once is undefined. */
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2593
2594
/* Group 12 shift-by-immediate workers (register forms only) — not yet implemented. */
/** Opcode 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/2. */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/4. */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/6. */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Udq_Ib, uint8_t, bRm);
2612
2613
2614/** Opcode 0x0f 0x71. */
2615FNIEMOP_DEF(iemOp_Grp12)
2616{
2617 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2618 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2619 return IEMOP_RAISE_INVALID_OPCODE();
2620 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2621 {
2622 case 0: case 1: case 3: case 5: case 7:
2623 return IEMOP_RAISE_INVALID_OPCODE();
2624 case 2:
2625 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2626 {
2627 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Nq_Ib, bRm);
2628 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Udq_Ib, bRm);
2629 default: return IEMOP_RAISE_INVALID_OPCODE();
2630 }
2631 case 4:
2632 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2633 {
2634 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Nq_Ib, bRm);
2635 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Udq_Ib, bRm);
2636 default: return IEMOP_RAISE_INVALID_OPCODE();
2637 }
2638 case 6:
2639 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2640 {
2641 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Nq_Ib, bRm);
2642 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Udq_Ib, bRm);
2643 default: return IEMOP_RAISE_INVALID_OPCODE();
2644 }
2645 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2646 }
2647}
2648
2649
/* Group 13 shift-by-immediate workers (register forms only) — not yet implemented. */
/** Opcode 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/2. */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/4. */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/6. */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Udq_Ib, uint8_t, bRm);
2667
2668
2669/** Opcode 0x0f 0x72. */
2670FNIEMOP_DEF(iemOp_Grp13)
2671{
2672 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2673 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2674 return IEMOP_RAISE_INVALID_OPCODE();
2675 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2676 {
2677 case 0: case 1: case 3: case 5: case 7:
2678 return IEMOP_RAISE_INVALID_OPCODE();
2679 case 2:
2680 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2681 {
2682 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Nq_Ib, bRm);
2683 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Udq_Ib, bRm);
2684 default: return IEMOP_RAISE_INVALID_OPCODE();
2685 }
2686 case 4:
2687 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2688 {
2689 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Nq_Ib, bRm);
2690 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Udq_Ib, bRm);
2691 default: return IEMOP_RAISE_INVALID_OPCODE();
2692 }
2693 case 6:
2694 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2695 {
2696 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Nq_Ib, bRm);
2697 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Udq_Ib, bRm);
2698 default: return IEMOP_RAISE_INVALID_OPCODE();
2699 }
2700 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2701 }
2702}
2703
2704
/* Group 14 shift-by-immediate workers (register forms only) — not yet implemented. */
/** Opcode 0x0f 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/2. */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/3. */
FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Udq_Ib, uint8_t, bRm); //NEXT

/** Opcode 0x0f 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/6. */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/7. */
FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Udq_Ib, uint8_t, bRm); //NEXT
2722
2723
2724/** Opcode 0x0f 0x73. */
2725FNIEMOP_DEF(iemOp_Grp14)
2726{
2727 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2728 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2729 return IEMOP_RAISE_INVALID_OPCODE();
2730 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2731 {
2732 case 0: case 1: case 4: case 5:
2733 return IEMOP_RAISE_INVALID_OPCODE();
2734 case 2:
2735 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2736 {
2737 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Nq_Ib, bRm);
2738 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Udq_Ib, bRm);
2739 default: return IEMOP_RAISE_INVALID_OPCODE();
2740 }
2741 case 3:
2742 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2743 {
2744 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrldq_Udq_Ib, bRm);
2745 default: return IEMOP_RAISE_INVALID_OPCODE();
2746 }
2747 case 6:
2748 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2749 {
2750 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Nq_Ib, bRm);
2751 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Udq_Ib, bRm);
2752 default: return IEMOP_RAISE_INVALID_OPCODE();
2753 }
2754 case 7:
2755 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2756 {
2757 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_pslldq_Udq_Ib, bRm);
2758 default: return IEMOP_RAISE_INVALID_OPCODE();
2759 }
2760 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2761 }
2762}
2763
2764
/**
 * Common worker for SSE2 and MMX instructions on the forms:
 *      pxxx    mm1, mm2/mem64
 *      pxxx    xmm1, xmm2/mem128
 *
 * Both operands are used in full width.
 * Proper alignment of the 128-bit operand is enforced.
 * Exceptions type 4. SSE2 and MMX cpuid checks.
 */
FNIEMOP_DEF_1(iemOpCommonMmxSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The mandatory prefix selects the instruction form: 0x66 = SSE, none = MMX. */
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint128_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(2, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);

                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default: /* Any other mandatory prefix combination is undefined. */
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2866
2867
/** Opcode 0x0f 0x74. */
FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqb");
    /* Byte equality compare; the common worker handles both MMX and SSE forms. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
}
2874
2875
/** Opcode 0x0f 0x75. */
FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqw");
    /* Word equality compare; the common worker handles both MMX and SSE forms. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
}
2882
2883
/** Opcode 0x0f 0x76. */
FNIEMOP_DEF(iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq) /* NOTE(review): identifier misspells 'pcmpeqd'; renaming would touch the opcode table. */
{
    IEMOP_MNEMONIC("pcmpeqd");
    /* Dword equality compare; the common worker handles both MMX and SSE forms. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
}
2890
2891
/** Opcode 0x0f 0x77. */
FNIEMOP_STUB(iemOp_emms); /* not yet implemented */
/** Opcode 0x0f 0x78. */
FNIEMOP_UD_STUB(iemOp_vmread_AmdGrp17); /* declared as a #UD stub */
/** Opcode 0x0f 0x79. */
FNIEMOP_UD_STUB(iemOp_vmwrite); /* declared as a #UD stub */
/** Opcode 0x0f 0x7c. */
FNIEMOP_STUB(iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps); /* not yet implemented */
/** Opcode 0x0f 0x7d. */
FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps); /* not yet implemented */
2902
2903
/** Opcode 0x0f 0x7e. */
FNIEMOP_DEF(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The mandatory prefix selects the source: 0x66 = XMM (SSE), none = MMX. */
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            IEMOP_MNEMONIC("movd/q Ed/q,Wd/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* greg, XMM */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                /* REX.W selects movq (64-bit) over movd (32-bit). */
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* [mem], XMM -- despite the 'Src' naming below, the memory operand is the store destination. */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                /* NOTE(review): last argument is 1 here vs 0 in the ALU workers — confirm the intended meaning. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffSrc, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq/d Ed/q,Pd/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* greg, MMX */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* [mem], MMX -- despite the 'Src' naming below, the memory operand is the store destination. */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffSrc, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default: /* Any other mandatory prefix combination is undefined. */
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3010
3011
/**
 * Opcode 0x0f 0x7f.
 *
 * Store variants selected by mandatory prefix:
 *  - 66h:    MOVDQA Wdq,Vdq (aligned 128-bit store, \#GP on unaligned mem).
 *  - F3h:    MOVDQU Wdq,Vdq (unaligned 128-bit store).
 *  - none:   MOVQ   Qq,Pq   (64-bit MMX store).
 *  - F2h:    invalid.
 */
FNIEMOP_DEF(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    bool fAligned = false;
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
            fAligned = true;
            /* fall thru - the aligned and unaligned forms only differ in the memory store below. */
        case IEM_OP_PRF_REPZ: /* SSE unaligned */
            if (fAligned)
                IEMOP_MNEMONIC("movdqa Wdq,Vdq");
            else
                IEMOP_MNEMONIC("movdqu Wdq,Vdq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_XREG_U128((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u128Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (fAligned)
                    IEM_MC_STORE_MEM_U128_ALIGN_SSE(pIemCpu->iEffSeg, GCPtrEffSrc, u128Tmp);
                else
                    IEM_MC_STORE_MEM_U128(pIemCpu->iEffSeg, GCPtrEffSrc, u128Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq Qq,Pq");

            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3106
3107
3108
/**
 * Opcode 0x0f 0x80.
 * JO rel16/32 - jump near if the overflow flag (OF) is set.
 * Operand size selects rel16 vs rel32; defaults to 64-bit op size in long mode.
 */
FNIEMOP_DEF(iemOp_jo_Jv)
{
    IEMOP_MNEMONIC("jo Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3142
3143
/**
 * Opcode 0x0f 0x81.
 * JNO rel16/32 - jump near if the overflow flag (OF) is clear (inverted
 * condition: OF set falls through, otherwise the relative jump is taken).
 */
FNIEMOP_DEF(iemOp_jno_Jv)
{
    IEMOP_MNEMONIC("jno Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3177
3178
/**
 * Opcode 0x0f 0x82.
 * JC/JB/JNAE rel16/32 - jump near if the carry flag (CF) is set.
 */
FNIEMOP_DEF(iemOp_jc_Jv)
{
    IEMOP_MNEMONIC("jc/jb/jnae Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3212
3213
/**
 * Opcode 0x0f 0x83.
 * JNC/JNB/JAE rel16/32 - jump near if the carry flag (CF) is clear.
 */
FNIEMOP_DEF(iemOp_jnc_Jv)
{
    IEMOP_MNEMONIC("jnc/jnb/jae Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3247
3248
/**
 * Opcode 0x0f 0x84.
 * JE/JZ rel16/32 - jump near if the zero flag (ZF) is set.
 */
FNIEMOP_DEF(iemOp_je_Jv)
{
    IEMOP_MNEMONIC("je/jz Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3282
3283
/**
 * Opcode 0x0f 0x85.
 * JNE/JNZ rel16/32 - jump near if the zero flag (ZF) is clear.
 */
FNIEMOP_DEF(iemOp_jne_Jv)
{
    IEMOP_MNEMONIC("jne/jnz Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3317
3318
/**
 * Opcode 0x0f 0x86.
 * JBE/JNA rel16/32 - jump near if below or equal (CF=1 or ZF=1).
 */
FNIEMOP_DEF(iemOp_jbe_Jv)
{
    IEMOP_MNEMONIC("jbe/jna Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3352
3353
/**
 * Opcode 0x0f 0x87.
 * JNBE/JA rel16/32 - jump near if above (CF=0 and ZF=0).
 */
FNIEMOP_DEF(iemOp_jnbe_Jv)
{
    IEMOP_MNEMONIC("jnbe/ja Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3387
3388
/**
 * Opcode 0x0f 0x88.
 * JS rel16/32 - jump near if the sign flag (SF) is set.
 */
FNIEMOP_DEF(iemOp_js_Jv)
{
    IEMOP_MNEMONIC("js Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3422
3423
/**
 * Opcode 0x0f 0x89.
 * JNS rel16/32 - jump near if the sign flag (SF) is clear.
 */
FNIEMOP_DEF(iemOp_jns_Jv)
{
    IEMOP_MNEMONIC("jns Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3457
3458
/**
 * Opcode 0x0f 0x8a.
 * JP/JPE rel16/32 - jump near if the parity flag (PF) is set.
 */
FNIEMOP_DEF(iemOp_jp_Jv)
{
    IEMOP_MNEMONIC("jp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3492
3493
3494/** Opcode 0x0f 0x8b. */
3495FNIEMOP_DEF(iemOp_jnp_Jv)
3496{
3497 IEMOP_MNEMONIC("jo Jv");
3498 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3499 if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
3500 {
3501 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3502 IEMOP_HLP_NO_LOCK_PREFIX();
3503
3504 IEM_MC_BEGIN(0, 0);
3505 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3506 IEM_MC_ADVANCE_RIP();
3507 } IEM_MC_ELSE() {
3508 IEM_MC_REL_JMP_S16(i16Imm);
3509 } IEM_MC_ENDIF();
3510 IEM_MC_END();
3511 }
3512 else
3513 {
3514 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3515 IEMOP_HLP_NO_LOCK_PREFIX();
3516
3517 IEM_MC_BEGIN(0, 0);
3518 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3519 IEM_MC_ADVANCE_RIP();
3520 } IEM_MC_ELSE() {
3521 IEM_MC_REL_JMP_S32(i32Imm);
3522 } IEM_MC_ENDIF();
3523 IEM_MC_END();
3524 }
3525 return VINF_SUCCESS;
3526}
3527
3528
/**
 * Opcode 0x0f 0x8c.
 * JL/JNGE rel16/32 - jump near if less (signed): SF != OF.
 */
FNIEMOP_DEF(iemOp_jl_Jv)
{
    IEMOP_MNEMONIC("jl/jnge Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3562
3563
/**
 * Opcode 0x0f 0x8d.
 * JNL/JGE rel16/32 - jump near if greater or equal (signed): SF == OF.
 */
FNIEMOP_DEF(iemOp_jnl_Jv)
{
    IEMOP_MNEMONIC("jnl/jge Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3597
3598
/**
 * Opcode 0x0f 0x8e.
 * JLE/JNG rel16/32 - jump near if less or equal (signed): ZF=1 or SF != OF.
 */
FNIEMOP_DEF(iemOp_jle_Jv)
{
    IEMOP_MNEMONIC("jle/jng Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3632
3633
/**
 * Opcode 0x0f 0x8f.
 * JNLE/JG rel16/32 - jump near if greater (signed): ZF=0 and SF == OF.
 */
FNIEMOP_DEF(iemOp_jnle_Jv)
{
    IEMOP_MNEMONIC("jnle/jg Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3667
3668
/**
 * Opcode 0x0f 0x90.
 * SETO r/m8 - set the byte operand to 1 if OF=1, else 0.
 */
FNIEMOP_DEF(iemOp_seto_Eb)
{
    IEMOP_MNEMONIC("seto Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3707
3708
/**
 * Opcode 0x0f 0x91.
 * SETNO r/m8 - set the byte operand to 1 if OF=0, else 0 (store values swapped
 * relative to SETO).
 */
FNIEMOP_DEF(iemOp_setno_Eb)
{
    IEMOP_MNEMONIC("setno Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3747
3748
/**
 * Opcode 0x0f 0x92.
 * SETC/SETB/SETNAE r/m8 - set the byte operand to 1 if CF=1, else 0.
 */
FNIEMOP_DEF(iemOp_setc_Eb)
{
    IEMOP_MNEMONIC("setc Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3787
3788
/**
 * Opcode 0x0f 0x93.
 * SETNC/SETNB/SETAE r/m8 - set the byte operand to 1 if CF=0, else 0.
 */
FNIEMOP_DEF(iemOp_setnc_Eb)
{
    IEMOP_MNEMONIC("setnc Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3827
3828
/**
 * Opcode 0x0f 0x94.
 * SETE/SETZ r/m8 - set the byte operand to 1 if ZF=1, else 0.
 */
FNIEMOP_DEF(iemOp_sete_Eb)
{
    IEMOP_MNEMONIC("sete Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3867
3868
/**
 * Opcode 0x0f 0x95.
 * SETNE/SETNZ r/m8 - set the byte operand to 1 if ZF=0, else 0.
 */
FNIEMOP_DEF(iemOp_setne_Eb)
{
    IEMOP_MNEMONIC("setne Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3907
3908
/**
 * Opcode 0x0f 0x96.
 * SETBE/SETNA r/m8 - set the byte operand to 1 if CF=1 or ZF=1, else 0.
 */
FNIEMOP_DEF(iemOp_setbe_Eb)
{
    IEMOP_MNEMONIC("setbe Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3947
3948
/**
 * Opcode 0x0f 0x97.
 * SETNBE/SETA r/m8 - set the byte operand to 1 if CF=0 and ZF=0, else 0.
 */
FNIEMOP_DEF(iemOp_setnbe_Eb)
{
    IEMOP_MNEMONIC("setnbe Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3987
3988
/**
 * Opcode 0x0f 0x98.
 * SETS r/m8 - set the byte operand to 1 if SF=1, else 0.
 */
FNIEMOP_DEF(iemOp_sets_Eb)
{
    IEMOP_MNEMONIC("sets Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4027
4028
/**
 * Opcode 0x0f 0x99.
 * SETNS r/m8 - set the byte operand to 1 if SF=0, else 0.
 */
FNIEMOP_DEF(iemOp_setns_Eb)
{
    IEMOP_MNEMONIC("setns Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4067
4068
4069/** Opcode 0x0f 0x9a. */
4070FNIEMOP_DEF(iemOp_setp_Eb)
4071{
4072 IEMOP_MNEMONIC("setnp Eb");
4073 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4074 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */
4075
4076 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4077 * any way. AMD says it's "unused", whatever that means. We're
4078 * ignoring for now. */
4079 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4080 {
4081 /* register target */
4082 IEM_MC_BEGIN(0, 0);
4083 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4084 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
4085 } IEM_MC_ELSE() {
4086 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
4087 } IEM_MC_ENDIF();
4088 IEM_MC_ADVANCE_RIP();
4089 IEM_MC_END();
4090 }
4091 else
4092 {
4093 /* memory target */
4094 IEM_MC_BEGIN(0, 1);
4095 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4096 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4097 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4098 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
4099 } IEM_MC_ELSE() {
4100 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
4101 } IEM_MC_ENDIF();
4102 IEM_MC_ADVANCE_RIP();
4103 IEM_MC_END();
4104 }
4105 return VINF_SUCCESS;
4106}
4107
4108
/** Opcode 0x0f 0x9b. */
FNIEMOP_DEF(iemOp_setnp_Eb)
{
    /* SETNP/SETPO: store 1 in the byte operand when PF is clear, 0 when set. */
    IEMOP_MNEMONIC("setnp Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target - PF set => 0, PF clear => 1. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target - same condition, stored through the effective address. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4147
4148
/** Opcode 0x0f 0x9c. */
FNIEMOP_DEF(iemOp_setl_Eb)
{
    /* SETL/SETNGE: store 1 in the byte operand when SF != OF (signed less), else 0. */
    IEMOP_MNEMONIC("setl Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4187
4188
/** Opcode 0x0f 0x9d. */
FNIEMOP_DEF(iemOp_setnl_Eb)
{
    /* SETNL/SETGE: store 1 in the byte operand when SF == OF (signed not-less), else 0. */
    IEMOP_MNEMONIC("setnl Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target - inverted condition of setl: SF != OF => 0, else 1. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4227
4228
/** Opcode 0x0f 0x9e. */
FNIEMOP_DEF(iemOp_setle_Eb)
{
    /* SETLE/SETNG: store 1 when ZF is set or SF != OF (signed less-or-equal), else 0. */
    IEMOP_MNEMONIC("setle Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4267
4268
/** Opcode 0x0f 0x9f. */
FNIEMOP_DEF(iemOp_setnle_Eb)
{
    /* SETNLE/SETG: store 1 when ZF is clear and SF == OF (signed greater), else 0. */
    IEMOP_MNEMONIC("setnle Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means.  We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target - inverted condition of setle. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4307
4308
4309/**
4310 * Common 'push segment-register' helper.
4311 */
4312FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
4313{
4314 IEMOP_HLP_NO_LOCK_PREFIX();
4315 if (iReg < X86_SREG_FS)
4316 IEMOP_HLP_NO_64BIT();
4317 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4318
4319 switch (pIemCpu->enmEffOpSize)
4320 {
4321 case IEMMODE_16BIT:
4322 IEM_MC_BEGIN(0, 1);
4323 IEM_MC_LOCAL(uint16_t, u16Value);
4324 IEM_MC_FETCH_SREG_U16(u16Value, iReg);
4325 IEM_MC_PUSH_U16(u16Value);
4326 IEM_MC_ADVANCE_RIP();
4327 IEM_MC_END();
4328 break;
4329
4330 case IEMMODE_32BIT:
4331 IEM_MC_BEGIN(0, 1);
4332 IEM_MC_LOCAL(uint32_t, u32Value);
4333 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
4334 IEM_MC_PUSH_U32_SREG(u32Value);
4335 IEM_MC_ADVANCE_RIP();
4336 IEM_MC_END();
4337 break;
4338
4339 case IEMMODE_64BIT:
4340 IEM_MC_BEGIN(0, 1);
4341 IEM_MC_LOCAL(uint64_t, u64Value);
4342 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
4343 IEM_MC_PUSH_U64(u64Value);
4344 IEM_MC_ADVANCE_RIP();
4345 IEM_MC_END();
4346 break;
4347 }
4348
4349 return VINF_SUCCESS;
4350}
4351
4352
/** Opcode 0x0f 0xa0. */
FNIEMOP_DEF(iemOp_push_fs)
{
    /* PUSH FS - defers to the common segment-register push helper. */
    IEMOP_MNEMONIC("push fs");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
}
4360
4361
/** Opcode 0x0f 0xa1. */
FNIEMOP_DEF(iemOp_pop_fs)
{
    /* POP FS - deferred to the C implementation (segment loading has many side effects). */
    IEMOP_MNEMONIC("pop fs");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pIemCpu->enmEffOpSize);
}
4369
4370
/** Opcode 0x0f 0xa2. */
FNIEMOP_DEF(iemOp_cpuid)
{
    /* CPUID - deferred entirely to the C implementation. */
    IEMOP_MNEMONIC("cpuid");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
}
4378
4379
4380/**
4381 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
4382 * iemOp_bts_Ev_Gv.
4383 */
4384FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
4385{
4386 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4387 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
4388
4389 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4390 {
4391 /* register destination. */
4392 IEMOP_HLP_NO_LOCK_PREFIX();
4393 switch (pIemCpu->enmEffOpSize)
4394 {
4395 case IEMMODE_16BIT:
4396 IEM_MC_BEGIN(3, 0);
4397 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4398 IEM_MC_ARG(uint16_t, u16Src, 1);
4399 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4400
4401 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4402 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
4403 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4404 IEM_MC_REF_EFLAGS(pEFlags);
4405 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4406
4407 IEM_MC_ADVANCE_RIP();
4408 IEM_MC_END();
4409 return VINF_SUCCESS;
4410
4411 case IEMMODE_32BIT:
4412 IEM_MC_BEGIN(3, 0);
4413 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4414 IEM_MC_ARG(uint32_t, u32Src, 1);
4415 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4416
4417 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4418 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
4419 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4420 IEM_MC_REF_EFLAGS(pEFlags);
4421 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4422
4423 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
4424 IEM_MC_ADVANCE_RIP();
4425 IEM_MC_END();
4426 return VINF_SUCCESS;
4427
4428 case IEMMODE_64BIT:
4429 IEM_MC_BEGIN(3, 0);
4430 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4431 IEM_MC_ARG(uint64_t, u64Src, 1);
4432 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4433
4434 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4435 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
4436 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4437 IEM_MC_REF_EFLAGS(pEFlags);
4438 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4439
4440 IEM_MC_ADVANCE_RIP();
4441 IEM_MC_END();
4442 return VINF_SUCCESS;
4443
4444 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4445 }
4446 }
4447 else
4448 {
4449 /* memory destination. */
4450
4451 uint32_t fAccess;
4452 if (pImpl->pfnLockedU16)
4453 fAccess = IEM_ACCESS_DATA_RW;
4454 else /* BT */
4455 {
4456 IEMOP_HLP_NO_LOCK_PREFIX();
4457 fAccess = IEM_ACCESS_DATA_R;
4458 }
4459
4460 /** @todo test negative bit offsets! */
4461 switch (pIemCpu->enmEffOpSize)
4462 {
4463 case IEMMODE_16BIT:
4464 IEM_MC_BEGIN(3, 2);
4465 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4466 IEM_MC_ARG(uint16_t, u16Src, 1);
4467 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4468 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4469 IEM_MC_LOCAL(int16_t, i16AddrAdj);
4470
4471 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4472 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4473 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
4474 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
4475 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
4476 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 1);
4477 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
4478 IEM_MC_FETCH_EFLAGS(EFlags);
4479
4480 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4481 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4482 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4483 else
4484 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
4485 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
4486
4487 IEM_MC_COMMIT_EFLAGS(EFlags);
4488 IEM_MC_ADVANCE_RIP();
4489 IEM_MC_END();
4490 return VINF_SUCCESS;
4491
4492 case IEMMODE_32BIT:
4493 IEM_MC_BEGIN(3, 2);
4494 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4495 IEM_MC_ARG(uint32_t, u32Src, 1);
4496 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4497 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4498 IEM_MC_LOCAL(int32_t, i32AddrAdj);
4499
4500 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4501 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4502 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
4503 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
4504 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
4505 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
4506 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
4507 IEM_MC_FETCH_EFLAGS(EFlags);
4508
4509 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4510 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4511 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4512 else
4513 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
4514 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
4515
4516 IEM_MC_COMMIT_EFLAGS(EFlags);
4517 IEM_MC_ADVANCE_RIP();
4518 IEM_MC_END();
4519 return VINF_SUCCESS;
4520
4521 case IEMMODE_64BIT:
4522 IEM_MC_BEGIN(3, 2);
4523 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4524 IEM_MC_ARG(uint64_t, u64Src, 1);
4525 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4526 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4527 IEM_MC_LOCAL(int64_t, i64AddrAdj);
4528
4529 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4530 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4531 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
4532 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
4533 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
4534 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
4535 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
4536 IEM_MC_FETCH_EFLAGS(EFlags);
4537
4538 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4539 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4540 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4541 else
4542 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
4543 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
4544
4545 IEM_MC_COMMIT_EFLAGS(EFlags);
4546 IEM_MC_ADVANCE_RIP();
4547 IEM_MC_END();
4548 return VINF_SUCCESS;
4549
4550 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4551 }
4552 }
4553}
4554
4555
4556/** Opcode 0x0f 0xa3. */
4557FNIEMOP_DEF(iemOp_bt_Ev_Gv)
4558{
4559 IEMOP_MNEMONIC("bt Gv,Gv");
4560 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
4561}
4562
4563
/**
 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
 *
 * Double-precision shift with an immediate shift count.  Note the decode
 * order for the memory form: the effective address is calculated with one
 * trailing immediate byte accounted for (cbImm = 1), and the count byte is
 * fetched afterwards.
 *
 * @param   pImpl   Operand-size dispatch table (g_iemAImpl_shld / _shrd).
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination - the count immediate follows modrm directly. */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG(uint16_t,        u16Src,                 1);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG(uint32_t,        u32Src,                 1);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero the high half */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG(uint64_t,        u64Src,                 1);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *,              pu16Dst,                0);
                IEM_MC_ARG(uint16_t,                u16Src,                 1);
                IEM_MC_ARG(uint8_t,                 cShiftArg,              2);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = count imm8 still to come */
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *,              pu32Dst,                0);
                IEM_MC_ARG(uint32_t,                u32Src,                 1);
                IEM_MC_ARG(uint8_t,                 cShiftArg,              2);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *,              pu64Dst,                0);
                IEM_MC_ARG(uint64_t,                u64Src,                 1);
                IEM_MC_ARG(uint8_t,                 cShiftArg,              2);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4708
4709
/**
 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
 *
 * Double-precision shift with the count taken from the CL register.
 *
 * @param   pImpl   Operand-size dispatch table (g_iemAImpl_shld / _shrd).
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination */
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG(uint16_t,        u16Src,                 1);
                IEM_MC_ARG(uint8_t,         cShiftArg,              2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* count comes from CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG(uint32_t,        u32Src,                 1);
                IEM_MC_ARG(uint8_t,         cShiftArg,              2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero the high half */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG(uint64_t,        u64Src,                 1);
                IEM_MC_ARG(uint8_t,         cShiftArg,              2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *,              pu16Dst,                0);
                IEM_MC_ARG(uint16_t,                u16Src,                 1);
                IEM_MC_ARG(uint8_t,                 cShiftArg,              2);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* no trailing immediate here */
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *,              pu32Dst,                0);
                IEM_MC_ARG(uint32_t,                u32Src,                 1);
                IEM_MC_ARG(uint8_t,                 cShiftArg,              2);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *,              pu64Dst,                0);
                IEM_MC_ARG(uint64_t,                u64Src,                 1);
                IEM_MC_ARG(uint8_t,                 cShiftArg,              2);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4853
4854
4855
/** Opcode 0x0f 0xa4. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
{
    /* SHLD Ev,Gv,Ib - defers to the common immediate-count double-shift worker. */
    IEMOP_MNEMONIC("shld Ev,Gv,Ib");
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
}
4862
4863
/** Opcode 0x0f 0xa5. */ /* NOTE(review): comment said 0xa7; shld Ev,Gv,CL is
                            normally 0x0f 0xa5 - confirm against the opcode map. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
{
    /* SHLD Ev,Gv,CL - defers to the common CL-count double-shift worker. */
    IEMOP_MNEMONIC("shld Ev,Gv,CL");
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
}
4870
4871
/** Opcode 0x0f 0xa8. */
FNIEMOP_DEF(iemOp_push_gs)
{
    /* PUSH GS - defers to the common segment-register push helper. */
    IEMOP_MNEMONIC("push gs");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
}
4879
4880
/** Opcode 0x0f 0xa9. */
FNIEMOP_DEF(iemOp_pop_gs)
{
    /* POP GS - deferred to the C implementation (segment loading has many side effects). */
    IEMOP_MNEMONIC("pop gs");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pIemCpu->enmEffOpSize);
}
4888
4889
/** Opcode 0x0f 0xaa.  RSM - implementation pending (stub). */
FNIEMOP_STUB(iemOp_rsm);
4892
4893
/** Opcode 0x0f 0xab. */
FNIEMOP_DEF(iemOp_bts_Ev_Gv)
{
    /* BTS Ev,Gv - defers to the common bit-op worker with the bts implementation. */
    IEMOP_MNEMONIC("bts Ev,Gv");
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
}
4900
4901
/** Opcode 0x0f 0xac. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
{
    /* SHRD Ev,Gv,Ib - defers to the common immediate-count double-shift worker. */
    IEMOP_MNEMONIC("shrd Ev,Gv,Ib");
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
}
4908
4909
/** Opcode 0x0f 0xad. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
{
    /* SHRD Ev,Gv,CL - defers to the common CL-count double-shift worker. */
    IEMOP_MNEMONIC("shrd Ev,Gv,CL");
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
}
4916
4917
/** Opcode 0x0f 0xae mem/0. */
FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
{
    /* FXSAVE m512 - raises #UD when CPUID doesn't advertise FXSR, otherwise
       defers to the C implementation with segment, address and operand size. */
    IEMOP_MNEMONIC("fxsave m512");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (!IEM_IS_INTEL_CPUID_FEATURE_PRESENT_EDX(X86_CPUID_FEATURE_EDX_FXSR))
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG_CONST(uint8_t,   iEffSeg,/*=*/pIemCpu->iEffSeg,             0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEff,                                  1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pIemCpu->enmEffOpSize,   2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
4935
4936
/** Opcode 0x0f 0xae mem/1. */
FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
{
    /* FXRSTOR m512 - raises #UD when CPUID doesn't advertise FXSR, otherwise
       defers to the C implementation with segment, address and operand size. */
    IEMOP_MNEMONIC("fxrstor m512");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (!IEM_IS_INTEL_CPUID_FEATURE_PRESENT_EDX(X86_CPUID_FEATURE_EDX_FXSR))
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG_CONST(uint8_t,   iEffSeg,/*=*/pIemCpu->iEffSeg,             0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEff,                                  1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pIemCpu->enmEffOpSize,   2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
4954
4955
/* Remaining group 15 memory forms: plain stubs are unimplemented; UD stubs
   raise an invalid-opcode exception. */

/** Opcode 0x0f 0xae mem/2.  LDMXCSR - implementation pending. */
FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/3.  STMXCSR - implementation pending. */
FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/4.  XSAVE - treated as invalid opcode. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/5.  XRSTOR - treated as invalid opcode. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/6.  XSAVEOPT - treated as invalid opcode. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/7.  CLFLUSH - implementation pending. */
FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);
4973
4974
/** Opcode 0x0f 0xae 11b/5. */
FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
{
    /* LFENCE - #UD without SSE2; uses the real lfence helper when the host
       CPU has SSE2, otherwise an alternative memory-fence helper. */
    IEMOP_MNEMONIC("lfence");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (!IEM_IS_INTEL_CPUID_FEATURE_PRESENT_EDX(X86_CPUID_FEATURE_EDX_SSE2))
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_IS_INTEL_CPUID_FEATURE_PRESENT_EDX_ON_HOST(X86_CPUID_FEATURE_EDX_SSE2))
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4992
4993
/** Opcode 0x0f 0xae 11b/6. */
FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
{
    /* MFENCE - #UD without SSE2; uses the real mfence helper when the host
       CPU has SSE2, otherwise an alternative memory-fence helper. */
    IEMOP_MNEMONIC("mfence");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (!IEM_IS_INTEL_CPUID_FEATURE_PRESENT_EDX(X86_CPUID_FEATURE_EDX_SSE2))
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_IS_INTEL_CPUID_FEATURE_PRESENT_EDX_ON_HOST(X86_CPUID_FEATURE_EDX_SSE2))
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5011
5012
/** Opcode 0x0f 0xae 11b/7. */
FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
{
    /* SFENCE - #UD without SSE2; uses the real sfence helper when the host
       CPU has SSE2, otherwise an alternative memory-fence helper. */
    IEMOP_MNEMONIC("sfence");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (!IEM_IS_INTEL_CPUID_FEATURE_PRESENT_EDX(X86_CPUID_FEATURE_EDX_SSE2))
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_IS_INTEL_CPUID_FEATURE_PRESENT_EDX_ON_HOST(X86_CPUID_FEATURE_EDX_SSE2))
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5030
5031
/* F3-prefixed group 15 register forms (FSGSBASE family): all treated as
   invalid opcode for now. */

/** Opcode 0xf3 0x0f 0xae 11b/0.  RDFSBASE. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/1.  RDGSBASE. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/2.  WRFSBASE. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/3.  WRGSBASE. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
5043
5044
/** Opcode 0x0f 0xae. */
FNIEMOP_DEF(iemOp_Grp15)
{
    /*
     * Group 15 dispatcher.  Memory forms dispatch purely on the reg field;
     * register forms additionally dispatch on the repz/repnz/opsize/lock
     * prefix combination (only no-prefix and F3 are valid here).
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /* memory forms */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave,   bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt,bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* register forms - any other prefix combination (incl. LOCK) is #UD. */
        switch (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
        {
            case 0:
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return IEMOP_RAISE_INVALID_OPCODE();
                    case 1: return IEMOP_RAISE_INVALID_OPCODE();
                    case 2: return IEMOP_RAISE_INVALID_OPCODE();
                    case 3: return IEMOP_RAISE_INVALID_OPCODE();
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
                    case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
                    case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - every case above returns */

            case IEM_OP_PRF_REPZ:
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
                    case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
                    case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
                    case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return IEMOP_RAISE_INVALID_OPCODE();
                    case 6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - every case above returns */

            default:
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
}
5103
5104
5105/** Opcode 0x0f 0xaf. */
/**
 * IMUL Gv,Ev - two-operand signed multiply; delegates decoding and
 * execution to the common reg,r/m binary-operator helper.
 * SF/ZF/AF/PF are declared undefined for the verifier.
 */
5106FNIEMOP_DEF(iemOp_imul_Gv_Ev)
5107{
5108 IEMOP_MNEMONIC("imul Gv,Ev");
5109 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5110 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
5111}
5112
5113
5114/** Opcode 0x0f 0xb0. */
/**
 * CMPXCHG Eb,Gb - byte compare-and-exchange against AL.
 *
 * The assembly helper receives references to the destination, to AL (or a
 * local copy of it for the memory form) and the source register value plus
 * EFLAGS; LOCK selects the locked helper variant.  In the memory form the
 * (possibly updated) AL copy is written back unconditionally after commit.
 */
5115FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
5116{
5117 IEMOP_MNEMONIC("cmpxchg Eb,Gb");
5118 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5119
5120 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5121 {
 /* Register destination. */
5122 IEMOP_HLP_DONE_DECODING();
5123 IEM_MC_BEGIN(4, 0);
5124 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5125 IEM_MC_ARG(uint8_t *, pu8Al, 1);
5126 IEM_MC_ARG(uint8_t, u8Src, 2);
5127 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5128
5129 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5130 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5131 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
5132 IEM_MC_REF_EFLAGS(pEFlags);
5133 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
5134 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
5135 else
5136 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
5137
5138 IEM_MC_ADVANCE_RIP();
5139 IEM_MC_END();
5140 }
5141 else
5142 {
 /* Memory destination: map it read-write and work on a local AL copy. */
5143 IEM_MC_BEGIN(4, 3);
5144 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5145 IEM_MC_ARG(uint8_t *, pu8Al, 1);
5146 IEM_MC_ARG(uint8_t, u8Src, 2);
5147 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5148 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5149 IEM_MC_LOCAL(uint8_t, u8Al);
5150
5151 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5152 IEMOP_HLP_DONE_DECODING();
5153 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
5154 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5155 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
5156 IEM_MC_FETCH_EFLAGS(EFlags);
5157 IEM_MC_REF_LOCAL(pu8Al, u8Al);
5158 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
5159 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
5160 else
5161 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);
5162
5163 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
5164 IEM_MC_COMMIT_EFLAGS(EFlags);
 /* Write back the AL copy the helper may have updated on mismatch. */
5165 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
5166 IEM_MC_ADVANCE_RIP();
5167 IEM_MC_END();
5168 }
5169 return VINF_SUCCESS;
5170}
5171
5172/** Opcode 0x0f 0xb1. */
/**
 * CMPXCHG Ev,Gv - 16/32/64-bit compare-and-exchange against rAX.
 *
 * Structure mirrors iemOp_cmpxchg_Eb_Gb, one case per effective operand
 * size for both register and memory destinations.  On RT_ARCH_X86 (32-bit
 * host) the 64-bit source is passed to the assembly helper by reference
 * instead of by value.
 */
5173FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
5174{
5175 IEMOP_MNEMONIC("cmpxchg Ev,Gv");
5176 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5177
5178 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5179 {
 /* Register destination. */
5180 IEMOP_HLP_DONE_DECODING();
5181 switch (pIemCpu->enmEffOpSize)
5182 {
5183 case IEMMODE_16BIT:
5184 IEM_MC_BEGIN(4, 0);
5185 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5186 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
5187 IEM_MC_ARG(uint16_t, u16Src, 2);
5188 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5189
5190 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5191 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5192 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
5193 IEM_MC_REF_EFLAGS(pEFlags);
5194 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
5195 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
5196 else
5197 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
5198
5199 IEM_MC_ADVANCE_RIP();
5200 IEM_MC_END();
5201 return VINF_SUCCESS;
5202
5203 case IEMMODE_32BIT:
5204 IEM_MC_BEGIN(4, 0);
5205 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5206 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
5207 IEM_MC_ARG(uint32_t, u32Src, 2);
5208 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5209
5210 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5211 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5212 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
5213 IEM_MC_REF_EFLAGS(pEFlags);
5214 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
5215 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
5216 else
5217 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
5218
 /* 32-bit register writes zero the upper halves in 64-bit mode. */
5219 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
5220 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5221 IEM_MC_ADVANCE_RIP();
5222 IEM_MC_END();
5223 return VINF_SUCCESS;
5224
5225 case IEMMODE_64BIT:
5226 IEM_MC_BEGIN(4, 0);
5227 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5228 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
5229#ifdef RT_ARCH_X86
 /* 32-bit host: pass the 64-bit source by reference to the helper. */
5230 IEM_MC_ARG(uint64_t *, pu64Src, 2);
5231#else
5232 IEM_MC_ARG(uint64_t, u64Src, 2);
5233#endif
5234 IEM_MC_ARG(uint32_t *, pEFlags, 3);
5235
5236 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5237 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
5238 IEM_MC_REF_EFLAGS(pEFlags);
5239#ifdef RT_ARCH_X86
5240 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5241 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
5242 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
5243 else
5244 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
5245#else
5246 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5247 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
5248 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
5249 else
5250 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
5251#endif
5252
5253 IEM_MC_ADVANCE_RIP();
5254 IEM_MC_END();
5255 return VINF_SUCCESS;
5256
5257 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5258 }
5259 }
5260 else
5261 {
 /* Memory destination: map RW, work on a local rAX copy, write it back. */
5262 switch (pIemCpu->enmEffOpSize)
5263 {
5264 case IEMMODE_16BIT:
5265 IEM_MC_BEGIN(4, 3);
5266 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5267 IEM_MC_ARG(uint16_t *, pu16Ax, 1);
5268 IEM_MC_ARG(uint16_t, u16Src, 2);
5269 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5270 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5271 IEM_MC_LOCAL(uint16_t, u16Ax);
5272
5273 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5274 IEMOP_HLP_DONE_DECODING();
5275 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
5276 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5277 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
5278 IEM_MC_FETCH_EFLAGS(EFlags);
5279 IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
5280 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
5281 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
5282 else
5283 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);
5284
5285 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5286 IEM_MC_COMMIT_EFLAGS(EFlags);
5287 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
5288 IEM_MC_ADVANCE_RIP();
5289 IEM_MC_END();
5290 return VINF_SUCCESS;
5291
5292 case IEMMODE_32BIT:
5293 IEM_MC_BEGIN(4, 3);
5294 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5295 IEM_MC_ARG(uint32_t *, pu32Eax, 1);
5296 IEM_MC_ARG(uint32_t, u32Src, 2);
5297 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5298 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5299 IEM_MC_LOCAL(uint32_t, u32Eax);
5300
5301 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5302 IEMOP_HLP_DONE_DECODING();
5303 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
5304 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5305 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
5306 IEM_MC_FETCH_EFLAGS(EFlags);
5307 IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
5308 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
5309 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
5310 else
5311 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);
5312
5313 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5314 IEM_MC_COMMIT_EFLAGS(EFlags);
5315 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
5316 IEM_MC_ADVANCE_RIP();
5317 IEM_MC_END();
5318 return VINF_SUCCESS;
5319
5320 case IEMMODE_64BIT:
5321 IEM_MC_BEGIN(4, 3);
5322 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5323 IEM_MC_ARG(uint64_t *, pu64Rax, 1);
5324#ifdef RT_ARCH_X86
 /* 32-bit host: pass the 64-bit source by reference to the helper. */
5325 IEM_MC_ARG(uint64_t *, pu64Src, 2);
5326#else
5327 IEM_MC_ARG(uint64_t, u64Src, 2);
5328#endif
5329 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
5330 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5331 IEM_MC_LOCAL(uint64_t, u64Rax);
5332
5333 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5334 IEMOP_HLP_DONE_DECODING();
5335 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
5336 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
5337 IEM_MC_FETCH_EFLAGS(EFlags);
5338 IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
5339#ifdef RT_ARCH_X86
5340 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5341 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
5342 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
5343 else
5344 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
5345#else
5346 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
5347 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
5348 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
5349 else
5350 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
5351#endif
5352
5353 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5354 IEM_MC_COMMIT_EFLAGS(EFlags);
5355 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
5356 IEM_MC_ADVANCE_RIP();
5357 IEM_MC_END();
5358 return VINF_SUCCESS;
5359
5360 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5361 }
5362 }
5363}
5364
5365
/**
 * Common worker for LSS/LFS/LGS (far-pointer load into a segment register
 * plus a general register).
 *
 * Fetches the offset and the 16-bit selector (which follows the offset in
 * memory) and defers the actual loads to iemCImpl_load_SReg_Greg.
 *
 * @param iSegReg The segment register to load (X86_SREG_XXX).
 * @param bRm The ModR/M byte; the caller guarantees a memory form.
 */
5366FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
5367{
5368 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
5369 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
5370
5371 switch (pIemCpu->enmEffOpSize)
5372 {
5373 case IEMMODE_16BIT:
5374 IEM_MC_BEGIN(5, 1);
5375 IEM_MC_ARG(uint16_t, uSel, 0);
5376 IEM_MC_ARG(uint16_t, offSeg, 1);
5377 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
5378 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
5379 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
5380 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
5381 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5382 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5383 IEM_MC_FETCH_MEM_U16(offSeg, pIemCpu->iEffSeg, GCPtrEff);
 /* Selector follows the 16-bit offset at displacement 2. */
5384 IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 2);
5385 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
5386 IEM_MC_END();
5387 return VINF_SUCCESS;
5388
5389 case IEMMODE_32BIT:
5390 IEM_MC_BEGIN(5, 1);
5391 IEM_MC_ARG(uint16_t, uSel, 0);
5392 IEM_MC_ARG(uint32_t, offSeg, 1);
5393 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
5394 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
5395 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
5396 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
5397 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5398 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5399 IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEff);
 /* Selector follows the 32-bit offset at displacement 4. */
5400 IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 4);
5401 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
5402 IEM_MC_END();
5403 return VINF_SUCCESS;
5404
5405 case IEMMODE_64BIT:
5406 IEM_MC_BEGIN(5, 1);
5407 IEM_MC_ARG(uint16_t, uSel, 0);
5408 IEM_MC_ARG(uint64_t, offSeg, 1);
5409 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
5410 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
5411 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
5412 IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
5413 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
5414 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5415 if (IEM_IS_GUEST_CPU_AMD(pIemCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
5416 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pIemCpu->iEffSeg, GCPtrEff);
5417 else
5418 IEM_MC_FETCH_MEM_U64(offSeg, pIemCpu->iEffSeg, GCPtrEff);
 /* Selector follows the 64-bit offset at displacement 8. */
5419 IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 8);
5420 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
5421 IEM_MC_END();
5422 return VINF_SUCCESS;
5423
5424 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5425 }
5426}
5427
5428
5429/** Opcode 0x0f 0xb2. */
/** LSS Gv,Mp - register forms are invalid (\#UD); memory forms go to the
 * common far-pointer loader with SS as the target segment register. */
5430FNIEMOP_DEF(iemOp_lss_Gv_Mp)
5431{
5432 IEMOP_MNEMONIC("lss Gv,Mp");
5433 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5434 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5435 return IEMOP_RAISE_INVALID_OPCODE();
5436 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
5437}
5438
5439
5440/** Opcode 0x0f 0xb3. */
/** BTR Ev,Gv - bit test and reset; delegates to the common Ev,Gv bit-op
 * worker with the btr implementation table. */
5441FNIEMOP_DEF(iemOp_btr_Ev_Gv)
5442{
5443 IEMOP_MNEMONIC("btr Ev,Gv");
5444 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
5445}
5446
5447
5448/** Opcode 0x0f 0xb4. */
/** LFS Gv,Mp - register forms are invalid (\#UD); memory forms go to the
 * common far-pointer loader with FS as the target segment register. */
5449FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
5450{
5451 IEMOP_MNEMONIC("lfs Gv,Mp");
5452 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5453 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5454 return IEMOP_RAISE_INVALID_OPCODE();
5455 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
5456}
5457
5458
5459/** Opcode 0x0f 0xb5. */
/** LGS Gv,Mp - register forms are invalid (\#UD); memory forms go to the
 * common far-pointer loader with GS as the target segment register. */
5460FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
5461{
5462 IEMOP_MNEMONIC("lgs Gv,Mp");
5463 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5464 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5465 return IEMOP_RAISE_INVALID_OPCODE();
5466 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
5467}
5468
5469
5470/** Opcode 0x0f 0xb6. */
/**
 * MOVZX Gv,Eb - zero-extend a byte (register or memory) into a 16/32/64-bit
 * general register, one case per effective operand size.
 */
5471FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
5472{
5473 IEMOP_MNEMONIC("movzx Gv,Eb");
5474
5475 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5476 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
5477
5478 /*
5479 * If rm is denoting a register, no more instruction bytes.
5480 */
5481 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5482 {
5483 switch (pIemCpu->enmEffOpSize)
5484 {
5485 case IEMMODE_16BIT:
5486 IEM_MC_BEGIN(0, 1);
5487 IEM_MC_LOCAL(uint16_t, u16Value);
5488 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5489 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
5490 IEM_MC_ADVANCE_RIP();
5491 IEM_MC_END();
5492 return VINF_SUCCESS;
5493
5494 case IEMMODE_32BIT:
5495 IEM_MC_BEGIN(0, 1);
5496 IEM_MC_LOCAL(uint32_t, u32Value);
5497 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5498 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
5499 IEM_MC_ADVANCE_RIP();
5500 IEM_MC_END();
5501 return VINF_SUCCESS;
5502
5503 case IEMMODE_64BIT:
5504 IEM_MC_BEGIN(0, 1);
5505 IEM_MC_LOCAL(uint64_t, u64Value);
5506 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5507 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
5508 IEM_MC_ADVANCE_RIP();
5509 IEM_MC_END();
5510 return VINF_SUCCESS;
5511
5512 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5513 }
5514 }
5515 else
5516 {
5517 /*
5518 * We're loading a register from memory.
5519 */
5520 switch (pIemCpu->enmEffOpSize)
5521 {
5522 case IEMMODE_16BIT:
5523 IEM_MC_BEGIN(0, 2);
5524 IEM_MC_LOCAL(uint16_t, u16Value);
5525 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5526 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5527 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
5528 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
5529 IEM_MC_ADVANCE_RIP();
5530 IEM_MC_END();
5531 return VINF_SUCCESS;
5532
5533 case IEMMODE_32BIT:
5534 IEM_MC_BEGIN(0, 2);
5535 IEM_MC_LOCAL(uint32_t, u32Value);
5536 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5537 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5538 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
5539 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
5540 IEM_MC_ADVANCE_RIP();
5541 IEM_MC_END();
5542 return VINF_SUCCESS;
5543
5544 case IEMMODE_64BIT:
5545 IEM_MC_BEGIN(0, 2);
5546 IEM_MC_LOCAL(uint64_t, u64Value);
5547 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5548 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5549 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
5550 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
5551 IEM_MC_ADVANCE_RIP();
5552 IEM_MC_END();
5553 return VINF_SUCCESS;
5554
5555 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5556 }
5557 }
5558}
5559
5560
5561/** Opcode 0x0f 0xb7. */
/**
 * MOVZX Gv,Ew - zero-extend a word into a 32/64-bit general register.
 * Only two cases: anything other than 64-bit effective operand size is
 * handled as a 32-bit destination.
 */
5562FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
5563{
5564 IEMOP_MNEMONIC("movzx Gv,Ew");
5565
5566 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5567 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
5568
5569 /** @todo Not entirely sure how the operand size prefix is handled here,
5570 * assuming that it will be ignored. Would be nice to have a few
5571 * test for this. */
5572 /*
5573 * If rm is denoting a register, no more instruction bytes.
5574 */
5575 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5576 {
5577 if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
5578 {
5579 IEM_MC_BEGIN(0, 1);
5580 IEM_MC_LOCAL(uint32_t, u32Value);
5581 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5582 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
5583 IEM_MC_ADVANCE_RIP();
5584 IEM_MC_END();
5585 }
5586 else
5587 {
5588 IEM_MC_BEGIN(0, 1);
5589 IEM_MC_LOCAL(uint64_t, u64Value);
5590 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5591 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
5592 IEM_MC_ADVANCE_RIP();
5593 IEM_MC_END();
5594 }
5595 }
5596 else
5597 {
5598 /*
5599 * We're loading a register from memory.
5600 */
5601 if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
5602 {
5603 IEM_MC_BEGIN(0, 2);
5604 IEM_MC_LOCAL(uint32_t, u32Value);
5605 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5606 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5607 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
5608 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
5609 IEM_MC_ADVANCE_RIP();
5610 IEM_MC_END();
5611 }
5612 else
5613 {
5614 IEM_MC_BEGIN(0, 2);
5615 IEM_MC_LOCAL(uint64_t, u64Value);
5616 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5617 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5618 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
5619 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
5620 IEM_MC_ADVANCE_RIP();
5621 IEM_MC_END();
5622 }
5623 }
5624 return VINF_SUCCESS;
5625}
5626
5627
5628/** Opcode 0x0f 0xb8. */
/* POPCNT (F3-prefixed) / JMPE - not implemented, stubbed. */
5629FNIEMOP_STUB(iemOp_popcnt_Gv_Ev_jmpe);
5630
5631
5632/** Opcode 0x0f 0xb9. */
/** Group 10 - this encoding always raises \#UD (logged for diagnostics). */
5633FNIEMOP_DEF(iemOp_Grp10)
5634{
5635 Log(("iemOp_Grp10 -> #UD\n"));
5636 return IEMOP_RAISE_INVALID_OPCODE();
5637}
5638
5639
5640/** Opcode 0x0f 0xba. */
/**
 * Group 8: BT/BTS/BTR/BTC Ev,Ib - bit operations with an immediate bit
 * index (masked to the operand width).  Reg values 0..3 are invalid.
 * For memory destinations the mapping is read-only for BT (which has no
 * locked variant) and read-write for the modifying forms; LOCK selects
 * the locked helper.  Note the immediate byte is fetched *after* the
 * effective address in the memory path (disp size 1 passed to the
 * address calculation).
 */
5641FNIEMOP_DEF(iemOp_Grp8)
5642{
5643 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5644 PCIEMOPBINSIZES pImpl;
5645 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5646 {
5647 case 0: case 1: case 2: case 3:
5648 return IEMOP_RAISE_INVALID_OPCODE();
5649 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC("bt Ev,Ib"); break;
5650 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC("bts Ev,Ib"); break;
5651 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC("btr Ev,Ib"); break;
5652 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC("btc Ev,Ib"); break;
5653 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5654 }
5655 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
5656
5657 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5658 {
5659 /* register destination. */
5660 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
5661 IEMOP_HLP_NO_LOCK_PREFIX();
5662
5663 switch (pIemCpu->enmEffOpSize)
5664 {
5665 case IEMMODE_16BIT:
5666 IEM_MC_BEGIN(3, 0);
5667 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
 /* Bit index wraps at the operand width (0x0f/0x1f/0x3f mask). */
5668 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
5669 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5670
5671 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5672 IEM_MC_REF_EFLAGS(pEFlags);
5673 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5674
5675 IEM_MC_ADVANCE_RIP();
5676 IEM_MC_END();
5677 return VINF_SUCCESS;
5678
5679 case IEMMODE_32BIT:
5680 IEM_MC_BEGIN(3, 0);
5681 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5682 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
5683 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5684
5685 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5686 IEM_MC_REF_EFLAGS(pEFlags);
5687 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5688
5689 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5690 IEM_MC_ADVANCE_RIP();
5691 IEM_MC_END();
5692 return VINF_SUCCESS;
5693
5694 case IEMMODE_64BIT:
5695 IEM_MC_BEGIN(3, 0);
5696 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5697 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
5698 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5699
5700 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5701 IEM_MC_REF_EFLAGS(pEFlags);
5702 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5703
5704 IEM_MC_ADVANCE_RIP();
5705 IEM_MC_END();
5706 return VINF_SUCCESS;
5707
5708 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5709 }
5710 }
5711 else
5712 {
5713 /* memory destination. */
5714
 /* BT only reads; it has no locked variant, so reject LOCK up front. */
5715 uint32_t fAccess;
5716 if (pImpl->pfnLockedU16)
5717 fAccess = IEM_ACCESS_DATA_RW;
5718 else /* BT */
5719 {
5720 IEMOP_HLP_NO_LOCK_PREFIX();
5721 fAccess = IEM_ACCESS_DATA_R;
5722 }
5723
5724 /** @todo test negative bit offsets! */
5725 switch (pIemCpu->enmEffOpSize)
5726 {
5727 case IEMMODE_16BIT:
5728 IEM_MC_BEGIN(3, 1);
5729 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5730 IEM_MC_ARG(uint16_t, u16Src, 1);
5731 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5732 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5733
 /* The trailing imm8 still has to be fetched: pass cbImm = 1. */
5734 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5735 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
5736 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
5737 IEM_MC_FETCH_EFLAGS(EFlags);
5738 IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
5739 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
5740 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
5741 else
5742 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
5743 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
5744
5745 IEM_MC_COMMIT_EFLAGS(EFlags);
5746 IEM_MC_ADVANCE_RIP();
5747 IEM_MC_END();
5748 return VINF_SUCCESS;
5749
5750 case IEMMODE_32BIT:
5751 IEM_MC_BEGIN(3, 1);
5752 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5753 IEM_MC_ARG(uint32_t, u32Src, 1);
5754 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5755 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5756
5757 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5758 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
5759 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
5760 IEM_MC_FETCH_EFLAGS(EFlags);
5761 IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
5762 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
5763 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
5764 else
5765 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
5766 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
5767
5768 IEM_MC_COMMIT_EFLAGS(EFlags);
5769 IEM_MC_ADVANCE_RIP();
5770 IEM_MC_END();
5771 return VINF_SUCCESS;
5772
5773 case IEMMODE_64BIT:
5774 IEM_MC_BEGIN(3, 1);
5775 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5776 IEM_MC_ARG(uint64_t, u64Src, 1);
5777 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
5778 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5779
5780 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5781 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
5782 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
5783 IEM_MC_FETCH_EFLAGS(EFlags);
5784 IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
5785 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
5786 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
5787 else
5788 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
5789 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
5790
5791 IEM_MC_COMMIT_EFLAGS(EFlags);
5792 IEM_MC_ADVANCE_RIP();
5793 IEM_MC_END();
5794 return VINF_SUCCESS;
5795
5796 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5797 }
5798 }
5799
5800}
5801
5802
5803/** Opcode 0x0f 0xbb. */
/** BTC Ev,Gv - bit test and complement; delegates to the common Ev,Gv
 * bit-op worker with the btc implementation table. */
5804FNIEMOP_DEF(iemOp_btc_Ev_Gv)
5805{
5806 IEMOP_MNEMONIC("btc Ev,Gv");
5807 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
5808}
5809
5810
5811/** Opcode 0x0f 0xbc. */
/** BSF Gv,Ev - bit scan forward via the common reg,r/m binary-operator
 * helper; all flags except ZF are declared undefined for the verifier. */
5812FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
5813{
5814 IEMOP_MNEMONIC("bsf Gv,Ev");
5815 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
5816 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
5817}
5818
5819
5820/** Opcode 0x0f 0xbd. */
/** BSR Gv,Ev - bit scan reverse via the common reg,r/m binary-operator
 * helper; all flags except ZF are declared undefined for the verifier. */
5821FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
5822{
5823 IEMOP_MNEMONIC("bsr Gv,Ev");
5824 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
5825 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
5826}
5827
5828
5829/** Opcode 0x0f 0xbe. */
/**
 * MOVSX Gv,Eb - sign-extend a byte (register or memory) into a 16/32/64-bit
 * general register; mirrors iemOp_movzx_Gv_Eb but with SX fetch macros.
 */
5830FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
5831{
5832 IEMOP_MNEMONIC("movsx Gv,Eb");
5833
5834 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5835 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
5836
5837 /*
5838 * If rm is denoting a register, no more instruction bytes.
5839 */
5840 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5841 {
5842 switch (pIemCpu->enmEffOpSize)
5843 {
5844 case IEMMODE_16BIT:
5845 IEM_MC_BEGIN(0, 1);
5846 IEM_MC_LOCAL(uint16_t, u16Value);
5847 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5848 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
5849 IEM_MC_ADVANCE_RIP();
5850 IEM_MC_END();
5851 return VINF_SUCCESS;
5852
5853 case IEMMODE_32BIT:
5854 IEM_MC_BEGIN(0, 1);
5855 IEM_MC_LOCAL(uint32_t, u32Value);
5856 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5857 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
5858 IEM_MC_ADVANCE_RIP();
5859 IEM_MC_END();
5860 return VINF_SUCCESS;
5861
5862 case IEMMODE_64BIT:
5863 IEM_MC_BEGIN(0, 1);
5864 IEM_MC_LOCAL(uint64_t, u64Value);
5865 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5866 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
5867 IEM_MC_ADVANCE_RIP();
5868 IEM_MC_END();
5869 return VINF_SUCCESS;
5870
5871 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5872 }
5873 }
5874 else
5875 {
5876 /*
5877 * We're loading a register from memory.
5878 */
5879 switch (pIemCpu->enmEffOpSize)
5880 {
5881 case IEMMODE_16BIT:
5882 IEM_MC_BEGIN(0, 2);
5883 IEM_MC_LOCAL(uint16_t, u16Value);
5884 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5885 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5886 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
5887 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
5888 IEM_MC_ADVANCE_RIP();
5889 IEM_MC_END();
5890 return VINF_SUCCESS;
5891
5892 case IEMMODE_32BIT:
5893 IEM_MC_BEGIN(0, 2);
5894 IEM_MC_LOCAL(uint32_t, u32Value);
5895 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5896 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5897 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
5898 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
5899 IEM_MC_ADVANCE_RIP();
5900 IEM_MC_END();
5901 return VINF_SUCCESS;
5902
5903 case IEMMODE_64BIT:
5904 IEM_MC_BEGIN(0, 2);
5905 IEM_MC_LOCAL(uint64_t, u64Value);
5906 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5907 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5908 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
5909 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
5910 IEM_MC_ADVANCE_RIP();
5911 IEM_MC_END();
5912 return VINF_SUCCESS;
5913
5914 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5915 }
5916 }
5917}
5918
5919
5920/** Opcode 0x0f 0xbf. */
/**
 * MOVSX Gv,Ew - sign-extend a word into a 32/64-bit general register;
 * mirrors iemOp_movzx_Gv_Ew but with SX fetch macros.
 */
5921FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
5922{
5923 IEMOP_MNEMONIC("movsx Gv,Ew");
5924
5925 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5926 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
5927
5928 /** @todo Not entirely sure how the operand size prefix is handled here,
5929 * assuming that it will be ignored. Would be nice to have a few
5930 * test for this. */
5931 /*
5932 * If rm is denoting a register, no more instruction bytes.
5933 */
5934 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5935 {
5936 if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
5937 {
5938 IEM_MC_BEGIN(0, 1);
5939 IEM_MC_LOCAL(uint32_t, u32Value);
5940 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5941 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
5942 IEM_MC_ADVANCE_RIP();
5943 IEM_MC_END();
5944 }
5945 else
5946 {
5947 IEM_MC_BEGIN(0, 1);
5948 IEM_MC_LOCAL(uint64_t, u64Value);
5949 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
5950 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
5951 IEM_MC_ADVANCE_RIP();
5952 IEM_MC_END();
5953 }
5954 }
5955 else
5956 {
5957 /*
5958 * We're loading a register from memory.
5959 */
5960 if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
5961 {
5962 IEM_MC_BEGIN(0, 2);
5963 IEM_MC_LOCAL(uint32_t, u32Value);
5964 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5965 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5966 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
5967 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
5968 IEM_MC_ADVANCE_RIP();
5969 IEM_MC_END();
5970 }
5971 else
5972 {
5973 IEM_MC_BEGIN(0, 2);
5974 IEM_MC_LOCAL(uint64_t, u64Value);
5975 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5976 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5977 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
5978 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
5979 IEM_MC_ADVANCE_RIP();
5980 IEM_MC_END();
5981 }
5982 }
5983 return VINF_SUCCESS;
5984}
5985
5986
/** Opcode 0x0f 0xc0. */
FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC("xadd Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* LOCK is only valid with a memory destination. */
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t *,  pu8Reg,  1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         * The register operand is exchanged via a local copy so the worker can
         * write both the sum (to memory) and the old destination (to the register).
         */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint8_t *,  pu8Dst,          0);
        IEM_MC_ARG(uint8_t *,  pu8Reg,          1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(uint8_t,  u8RegCopy);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);

        /* Commit memory first, then flags, then write the old value back to the register. */
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8RegCopy);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    return VINF_SUCCESS;
}
6044
6045
/** Opcode 0x0f 0xc1. */
FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
{
    IEMOP_MNEMONIC("xadd Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* LOCK is only valid with a memory destination. */
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst,  0);
                IEM_MC_ARG(uint16_t *, pu16Reg,  1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst,  0);
                IEM_MC_ARG(uint32_t *, pu32Reg,  1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);

                /* 32-bit writes in long mode zero bits 63:32 of both registers. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst,  0);
                IEM_MC_ARG(uint64_t *, pu64Reg,  1);
                IEM_MC_ARG(uint32_t *, pEFlags,  2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         * Same pattern as xadd Eb,Gb: keep a local copy of the register operand
         * so the old destination value can be stored back after the commit.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst,          0);
                IEM_MC_ARG(uint16_t *, pu16Reg,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16RegCopy);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst,          0);
                IEM_MC_ARG(uint32_t *, pu32Reg,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32RegCopy);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst,          0);
                IEM_MC_ARG(uint64_t *, pu64Reg,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64RegCopy);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6197
6198/** Opcode 0x0f 0xc2. */
6199FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib);
6200
6201/** Opcode 0x0f 0xc3. */
6202FNIEMOP_STUB(iemOp_movnti_My_Gy);
6203
6204/** Opcode 0x0f 0xc4. */
6205FNIEMOP_STUB(iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib);
6206
6207/** Opcode 0x0f 0xc5. */
6208FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib);
6209
6210/** Opcode 0x0f 0xc6. */
6211FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib);
6212
6213
/** Opcode 0x0f 0xc7 !11/1. */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC("cmpxchg8b Mq");

    IEM_MC_BEGIN(4, 3);
    IEM_MC_ARG(uint64_t *, pu64MemDst,     0);
    IEM_MC_ARG(PRTUINT64U, pu64EaxEdx,     1);
    IEM_MC_ARG(PRTUINT64U, pu64EbxEcx,     2);
    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
    IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
    IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);

    /* Assemble the EDX:EAX comparand from the two 32-bit halves. */
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
    IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);

    /* Assemble the ECX:EBX replacement value likewise. */
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
    IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);

    IEM_MC_FETCH_EFLAGS(EFlags);
    if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
    else
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);

    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    /* On mismatch (ZF clear) the worker has stored the memory value in the
       comparand local; write it back to EDX:EAX as the architecture requires. */
    IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
        /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
        IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
        IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6258
6259
6260/** Opcode REX.W 0x0f 0xc7 !11/1. */
6261FNIEMOP_UD_STUB_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm);
6262
6263/** Opcode 0x0f 0xc7 11/6. */
6264FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);
6265
6266/** Opcode 0x0f 0xc7 !11/6. */
6267FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);
6268
6269/** Opcode 0x66 0x0f 0xc7 !11/6. */
6270FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);
6271
6272/** Opcode 0xf3 0x0f 0xc7 !11/6. */
6273FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);
6274
6275/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
6276FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
6277
6278
6279/** Opcode 0x0f 0xc7. */
6280FNIEMOP_DEF(iemOp_Grp9)
6281{
6282 /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
6283 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6284 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6285 {
6286 case 0: case 2: case 3: case 4: case 5:
6287 return IEMOP_RAISE_INVALID_OPCODE();
6288 case 1:
6289 /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
6290 if ( (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
6291 || (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
6292 return IEMOP_RAISE_INVALID_OPCODE();
6293 if (bRm & IEM_OP_PRF_SIZE_REX_W)
6294 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
6295 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
6296 case 6:
6297 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6298 return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
6299 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6300 {
6301 case 0:
6302 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
6303 case IEM_OP_PRF_SIZE_OP:
6304 return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
6305 case IEM_OP_PRF_REPZ:
6306 return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
6307 default:
6308 return IEMOP_RAISE_INVALID_OPCODE();
6309 }
6310 case 7:
6311 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6312 {
6313 case 0:
6314 case IEM_OP_PRF_REPZ:
6315 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
6316 default:
6317 return IEMOP_RAISE_INVALID_OPCODE();
6318 }
6319 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6320 }
6321}
6322
6323
/**
 * Common 'bswap register' helper.
 *
 * @param   iReg    The (REX adjusted) index of the register to byte swap.
 */
FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            /* 16-bit bswap behavior is undefined by the spec; the worker
               decides what to do with the low word. */
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint32_t *,  pu32Dst, 0);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);     /* Don't clear the high dword! */
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint32_t *,  pu32Dst, 0);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            /* A 32-bit register write zeroes bits 63:32 in long mode. */
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(1, 0);
            IEM_MC_ARG(uint64_t *,  pu64Dst, 0);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6363
6364
/** Opcode 0x0f 0xc8. */
FNIEMOP_DEF(iemOp_bswap_rAX_r8)
{
    IEMOP_MNEMONIC("bswap rAX/r8");
    /* Note! Intel manuals states that R8-R15 can be accessed by using a REX.X
             prefix.  REX.B is the correct prefix it appears.  For a parallel
             case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pIemCpu->uRexB);
}
6374
6375
/** Opcode 0x0f 0xc9. */
FNIEMOP_DEF(iemOp_bswap_rCX_r9)
{
    IEMOP_MNEMONIC("bswap rCX/r9");
    /* REX.B selects r9 instead of rCX; see note in iemOp_bswap_rAX_r8. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pIemCpu->uRexB);
}
6382
6383
6384/** Opcode 0x0f 0xca. */
6385FNIEMOP_DEF(iemOp_bswap_rDX_r10)
6386{
6387 IEMOP_MNEMONIC("bswap rDX/r9");
6388 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pIemCpu->uRexB);
6389}
6390
6391
6392/** Opcode 0x0f 0xcb. */
6393FNIEMOP_DEF(iemOp_bswap_rBX_r11)
6394{
6395 IEMOP_MNEMONIC("bswap rBX/r9");
6396 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pIemCpu->uRexB);
6397}
6398
6399
/** Opcode 0x0f 0xcc. */
FNIEMOP_DEF(iemOp_bswap_rSP_r12)
{
    IEMOP_MNEMONIC("bswap rSP/r12");
    /* REX.B selects r12 instead of rSP; see note in iemOp_bswap_rAX_r8. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pIemCpu->uRexB);
}
6406
6407
/** Opcode 0x0f 0xcd. */
FNIEMOP_DEF(iemOp_bswap_rBP_r13)
{
    IEMOP_MNEMONIC("bswap rBP/r13");
    /* REX.B selects r13 instead of rBP; see note in iemOp_bswap_rAX_r8. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pIemCpu->uRexB);
}
6414
6415
/** Opcode 0x0f 0xce. */
FNIEMOP_DEF(iemOp_bswap_rSI_r14)
{
    IEMOP_MNEMONIC("bswap rSI/r14");
    /* REX.B selects r14 instead of rSI; see note in iemOp_bswap_rAX_r8. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pIemCpu->uRexB);
}
6422
6423
/** Opcode 0x0f 0xcf. */
FNIEMOP_DEF(iemOp_bswap_rDI_r15)
{
    IEMOP_MNEMONIC("bswap rDI/r15");
    /* REX.B selects r15 instead of rDI; see note in iemOp_bswap_rAX_r8. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pIemCpu->uRexB);
}
6430
6431
6432
6433/** Opcode 0x0f 0xd0. */
6434FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps);
6435/** Opcode 0x0f 0xd1. */
6436FNIEMOP_STUB(iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq);
6437/** Opcode 0x0f 0xd2. */
6438FNIEMOP_STUB(iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq);
6439/** Opcode 0x0f 0xd3. */
6440FNIEMOP_STUB(iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq);
6441/** Opcode 0x0f 0xd4. */
6442FNIEMOP_STUB(iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq);
6443/** Opcode 0x0f 0xd5. */
6444FNIEMOP_STUB(iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq);
6445/** Opcode 0x0f 0xd6. */
6446FNIEMOP_STUB(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq);
6447
6448
6449/** Opcode 0x0f 0xd7. */
6450FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq)
6451{
6452 /* Docs says register only. */
6453 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6454 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
6455 return IEMOP_RAISE_INVALID_OPCODE();
6456
6457 /* Note! Taking the lazy approch here wrt the high 32-bits of the GREG. */
6458 /** @todo testcase: Check that the instruction implicitly clears the high
6459 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
6460 * and opcode modifications are made to work with the whole width (not
6461 * just 128). */
6462 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6463 {
6464 case IEM_OP_PRF_SIZE_OP: /* SSE */
6465 IEMOP_MNEMONIC("pmovmskb Gd,Nq");
6466 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
6467 IEM_MC_BEGIN(2, 0);
6468 IEM_MC_ARG(uint64_t *, pDst, 0);
6469 IEM_MC_ARG(uint128_t const *, pSrc, 1);
6470 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6471 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
6472 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
6473 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
6474 IEM_MC_ADVANCE_RIP();
6475 IEM_MC_END();
6476 return VINF_SUCCESS;
6477
6478 case 0: /* MMX */
6479 IEMOP_MNEMONIC("pmovmskb Gd,Udq");
6480 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
6481 IEM_MC_BEGIN(2, 0);
6482 IEM_MC_ARG(uint64_t *, pDst, 0);
6483 IEM_MC_ARG(uint64_t const *, pSrc, 1);
6484 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
6485 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
6486 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
6487 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
6488 IEM_MC_ADVANCE_RIP();
6489 IEM_MC_END();
6490 return VINF_SUCCESS;
6491
6492 default:
6493 return IEMOP_RAISE_INVALID_OPCODE();
6494 }
6495}
6496
6497
6498/** Opcode 0x0f 0xd8. */
6499FNIEMOP_STUB(iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq);
6500/** Opcode 0x0f 0xd9. */
6501FNIEMOP_STUB(iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq);
6502/** Opcode 0x0f 0xda. */
6503FNIEMOP_STUB(iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq);
6504/** Opcode 0x0f 0xdb. */
6505FNIEMOP_STUB(iemOp_pand_Pq_Qq__pand_Vdq_Wdq);
6506/** Opcode 0x0f 0xdc. */
6507FNIEMOP_STUB(iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq);
6508/** Opcode 0x0f 0xdd. */
6509FNIEMOP_STUB(iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq);
6510/** Opcode 0x0f 0xde. */
6511FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq);
6512/** Opcode 0x0f 0xdf. */
6513FNIEMOP_STUB(iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq);
6514/** Opcode 0x0f 0xe0. */
6515FNIEMOP_STUB(iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq);
6516/** Opcode 0x0f 0xe1. */
6517FNIEMOP_STUB(iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq);
6518/** Opcode 0x0f 0xe2. */
6519FNIEMOP_STUB(iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq);
6520/** Opcode 0x0f 0xe3. */
6521FNIEMOP_STUB(iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq);
6522/** Opcode 0x0f 0xe4. */
6523FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq);
6524/** Opcode 0x0f 0xe5. */
6525FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq);
6526/** Opcode 0x0f 0xe6. */
6527FNIEMOP_STUB(iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd);
6528/** Opcode 0x0f 0xe7. */
6529FNIEMOP_STUB(iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq);
6530/** Opcode 0x0f 0xe8. */
6531FNIEMOP_STUB(iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq);
6532/** Opcode 0x0f 0xe9. */
6533FNIEMOP_STUB(iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq);
6534/** Opcode 0x0f 0xea. */
6535FNIEMOP_STUB(iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq);
6536/** Opcode 0x0f 0xeb. */
6537FNIEMOP_STUB(iemOp_por_Pq_Qq__por_Vdq_Wdq);
6538/** Opcode 0x0f 0xec. */
6539FNIEMOP_STUB(iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq);
6540/** Opcode 0x0f 0xed. */
6541FNIEMOP_STUB(iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq);
6542/** Opcode 0x0f 0xee. */
6543FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq);
6544
6545
/** Opcode 0x0f 0xef. */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pxor");
    /* Both the MMX and SSE2 forms are handled by the common full,full->full worker. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pxor);
}
6552
6553
6554/** Opcode 0x0f 0xf0. */
6555FNIEMOP_STUB(iemOp_lddqu_Vdq_Mdq);
6556/** Opcode 0x0f 0xf1. */
6557FNIEMOP_STUB(iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq);
6558/** Opcode 0x0f 0xf2. */
6559FNIEMOP_STUB(iemOp_psld_Pq_Qq__pslld_Vdq_Wdq);
6560/** Opcode 0x0f 0xf3. */
6561FNIEMOP_STUB(iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq);
6562/** Opcode 0x0f 0xf4. */
6563FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq);
6564/** Opcode 0x0f 0xf5. */
6565FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq);
6566/** Opcode 0x0f 0xf6. */
6567FNIEMOP_STUB(iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq);
6568/** Opcode 0x0f 0xf7. */
6569FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq);
6570/** Opcode 0x0f 0xf8. */
6571FNIEMOP_STUB(iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq); //NEXT
6572/** Opcode 0x0f 0xf9. */
6573FNIEMOP_STUB(iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq);
6574/** Opcode 0x0f 0xfa. */
6575FNIEMOP_STUB(iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq);
6576/** Opcode 0x0f 0xfb. */
6577FNIEMOP_STUB(iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq);
6578/** Opcode 0x0f 0xfc. */
6579FNIEMOP_STUB(iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq);
6580/** Opcode 0x0f 0xfd. */
6581FNIEMOP_STUB(iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq);
6582/** Opcode 0x0f 0xfe. */
6583FNIEMOP_STUB(iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq);
6584
6585
/**
 * Decoder dispatch table for the two byte opcodes (0x0f prefixed), indexed by
 * the second opcode byte.  Prefix selection (0x66/0xf2/0xf3) is handled inside
 * the individual workers.
 *
 * Note: the comment on the btc entry previously said 0xbd; it is slot 0xbb.
 */
const PFNIEMOP g_apfnTwoByteMap[256] =
{
    /* 0x00 */  iemOp_Grp6,
    /* 0x01 */  iemOp_Grp7,
    /* 0x02 */  iemOp_lar_Gv_Ew,
    /* 0x03 */  iemOp_lsl_Gv_Ew,
    /* 0x04 */  iemOp_Invalid,
    /* 0x05 */  iemOp_syscall,
    /* 0x06 */  iemOp_clts,
    /* 0x07 */  iemOp_sysret,
    /* 0x08 */  iemOp_invd,
    /* 0x09 */  iemOp_wbinvd,
    /* 0x0a */  iemOp_Invalid,
    /* 0x0b */  iemOp_ud2,
    /* 0x0c */  iemOp_Invalid,
    /* 0x0d */  iemOp_nop_Ev_GrpP,
    /* 0x0e */  iemOp_femms,
    /* 0x0f */  iemOp_3Dnow,
    /* 0x10 */  iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd,
    /* 0x11 */  iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd,
    /* 0x12 */  iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq,
    /* 0x13 */  iemOp_movlps_Mq_Vq__movlpd_Mq_Vq,
    /* 0x14 */  iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq,
    /* 0x15 */  iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq,
    /* 0x16 */  iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq,
    /* 0x17 */  iemOp_movhps_Mq_Vq__movhpd_Mq_Vq,
    /* 0x18 */  iemOp_prefetch_Grp16,
    /* 0x19 */  iemOp_nop_Ev,
    /* 0x1a */  iemOp_nop_Ev,
    /* 0x1b */  iemOp_nop_Ev,
    /* 0x1c */  iemOp_nop_Ev,
    /* 0x1d */  iemOp_nop_Ev,
    /* 0x1e */  iemOp_nop_Ev,
    /* 0x1f */  iemOp_nop_Ev,
    /* 0x20 */  iemOp_mov_Rd_Cd,
    /* 0x21 */  iemOp_mov_Rd_Dd,
    /* 0x22 */  iemOp_mov_Cd_Rd,
    /* 0x23 */  iemOp_mov_Dd_Rd,
    /* 0x24 */  iemOp_mov_Rd_Td,
    /* 0x25 */  iemOp_Invalid,
    /* 0x26 */  iemOp_mov_Td_Rd,
    /* 0x27 */  iemOp_Invalid,
    /* 0x28 */  iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd,
    /* 0x29 */  iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd,
    /* 0x2a */  iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey,
    /* 0x2b */  iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd,
    /* 0x2c */  iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd,
    /* 0x2d */  iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd,
    /* 0x2e */  iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd,
    /* 0x2f */  iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd,
    /* 0x30 */  iemOp_wrmsr,
    /* 0x31 */  iemOp_rdtsc,
    /* 0x32 */  iemOp_rdmsr,
    /* 0x33 */  iemOp_rdpmc,
    /* 0x34 */  iemOp_sysenter,
    /* 0x35 */  iemOp_sysexit,
    /* 0x36 */  iemOp_Invalid,
    /* 0x37 */  iemOp_getsec,
    /* 0x38 */  iemOp_3byte_Esc_A4,
    /* 0x39 */  iemOp_Invalid,
    /* 0x3a */  iemOp_3byte_Esc_A5,
    /* 0x3b */  iemOp_Invalid,
    /* 0x3c */  iemOp_movnti_Gv_Ev/*??*/,
    /* 0x3d */  iemOp_Invalid,
    /* 0x3e */  iemOp_Invalid,
    /* 0x3f */  iemOp_Invalid,
    /* 0x40 */  iemOp_cmovo_Gv_Ev,
    /* 0x41 */  iemOp_cmovno_Gv_Ev,
    /* 0x42 */  iemOp_cmovc_Gv_Ev,
    /* 0x43 */  iemOp_cmovnc_Gv_Ev,
    /* 0x44 */  iemOp_cmove_Gv_Ev,
    /* 0x45 */  iemOp_cmovne_Gv_Ev,
    /* 0x46 */  iemOp_cmovbe_Gv_Ev,
    /* 0x47 */  iemOp_cmovnbe_Gv_Ev,
    /* 0x48 */  iemOp_cmovs_Gv_Ev,
    /* 0x49 */  iemOp_cmovns_Gv_Ev,
    /* 0x4a */  iemOp_cmovp_Gv_Ev,
    /* 0x4b */  iemOp_cmovnp_Gv_Ev,
    /* 0x4c */  iemOp_cmovl_Gv_Ev,
    /* 0x4d */  iemOp_cmovnl_Gv_Ev,
    /* 0x4e */  iemOp_cmovle_Gv_Ev,
    /* 0x4f */  iemOp_cmovnle_Gv_Ev,
    /* 0x50 */  iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd,
    /* 0x51 */  iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd,
    /* 0x52 */  iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss,
    /* 0x53 */  iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss,
    /* 0x54 */  iemOp_andps_Vps_Wps__andpd_Wpd_Vpd,
    /* 0x55 */  iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd,
    /* 0x56 */  iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd,
    /* 0x57 */  iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd,
    /* 0x58 */  iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd,
    /* 0x59 */  iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd,
    /* 0x5a */  iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd,
    /* 0x5b */  iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps,
    /* 0x5c */  iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd,
    /* 0x5d */  iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd,
    /* 0x5e */  iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd,
    /* 0x5f */  iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd,
    /* 0x60 */  iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq,
    /* 0x61 */  iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq,
    /* 0x62 */  iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq,
    /* 0x63 */  iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq,
    /* 0x64 */  iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq,
    /* 0x65 */  iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq,
    /* 0x66 */  iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq,
    /* 0x67 */  iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq,
    /* 0x68 */  iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq,
    /* 0x69 */  iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq,
    /* 0x6a */  iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq,
    /* 0x6b */  iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq,
    /* 0x6c */  iemOp_punpcklqdq_Vdq_Wdq,
    /* 0x6d */  iemOp_punpckhqdq_Vdq_Wdq,
    /* 0x6e */  iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey,
    /* 0x6f */  iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq,
    /* 0x70 */  iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib,
    /* 0x71 */  iemOp_Grp12,
    /* 0x72 */  iemOp_Grp13,
    /* 0x73 */  iemOp_Grp14,
    /* 0x74 */  iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq,
    /* 0x75 */  iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq,
    /* 0x76 */  iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq,
    /* 0x77 */  iemOp_emms,
    /* 0x78 */  iemOp_vmread_AmdGrp17,
    /* 0x79 */  iemOp_vmwrite,
    /* 0x7a */  iemOp_Invalid,
    /* 0x7b */  iemOp_Invalid,
    /* 0x7c */  iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps,
    /* 0x7d */  iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps,
    /* 0x7e */  iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq,
    /* 0x7f */  iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq,
    /* 0x80 */  iemOp_jo_Jv,
    /* 0x81 */  iemOp_jno_Jv,
    /* 0x82 */  iemOp_jc_Jv,
    /* 0x83 */  iemOp_jnc_Jv,
    /* 0x84 */  iemOp_je_Jv,
    /* 0x85 */  iemOp_jne_Jv,
    /* 0x86 */  iemOp_jbe_Jv,
    /* 0x87 */  iemOp_jnbe_Jv,
    /* 0x88 */  iemOp_js_Jv,
    /* 0x89 */  iemOp_jns_Jv,
    /* 0x8a */  iemOp_jp_Jv,
    /* 0x8b */  iemOp_jnp_Jv,
    /* 0x8c */  iemOp_jl_Jv,
    /* 0x8d */  iemOp_jnl_Jv,
    /* 0x8e */  iemOp_jle_Jv,
    /* 0x8f */  iemOp_jnle_Jv,
    /* 0x90 */  iemOp_seto_Eb,
    /* 0x91 */  iemOp_setno_Eb,
    /* 0x92 */  iemOp_setc_Eb,
    /* 0x93 */  iemOp_setnc_Eb,
    /* 0x94 */  iemOp_sete_Eb,
    /* 0x95 */  iemOp_setne_Eb,
    /* 0x96 */  iemOp_setbe_Eb,
    /* 0x97 */  iemOp_setnbe_Eb,
    /* 0x98 */  iemOp_sets_Eb,
    /* 0x99 */  iemOp_setns_Eb,
    /* 0x9a */  iemOp_setp_Eb,
    /* 0x9b */  iemOp_setnp_Eb,
    /* 0x9c */  iemOp_setl_Eb,
    /* 0x9d */  iemOp_setnl_Eb,
    /* 0x9e */  iemOp_setle_Eb,
    /* 0x9f */  iemOp_setnle_Eb,
    /* 0xa0 */  iemOp_push_fs,
    /* 0xa1 */  iemOp_pop_fs,
    /* 0xa2 */  iemOp_cpuid,
    /* 0xa3 */  iemOp_bt_Ev_Gv,
    /* 0xa4 */  iemOp_shld_Ev_Gv_Ib,
    /* 0xa5 */  iemOp_shld_Ev_Gv_CL,
    /* 0xa6 */  iemOp_Invalid,
    /* 0xa7 */  iemOp_Invalid,
    /* 0xa8 */  iemOp_push_gs,
    /* 0xa9 */  iemOp_pop_gs,
    /* 0xaa */  iemOp_rsm,
    /* 0xab */  iemOp_bts_Ev_Gv,
    /* 0xac */  iemOp_shrd_Ev_Gv_Ib,
    /* 0xad */  iemOp_shrd_Ev_Gv_CL,
    /* 0xae */  iemOp_Grp15,
    /* 0xaf */  iemOp_imul_Gv_Ev,
    /* 0xb0 */  iemOp_cmpxchg_Eb_Gb,
    /* 0xb1 */  iemOp_cmpxchg_Ev_Gv,
    /* 0xb2 */  iemOp_lss_Gv_Mp,
    /* 0xb3 */  iemOp_btr_Ev_Gv,
    /* 0xb4 */  iemOp_lfs_Gv_Mp,
    /* 0xb5 */  iemOp_lgs_Gv_Mp,
    /* 0xb6 */  iemOp_movzx_Gv_Eb,
    /* 0xb7 */  iemOp_movzx_Gv_Ew,
    /* 0xb8 */  iemOp_popcnt_Gv_Ev_jmpe,
    /* 0xb9 */  iemOp_Grp10,
    /* 0xba */  iemOp_Grp8,
    /* 0xbb */  iemOp_btc_Ev_Gv,
    /* 0xbc */  iemOp_bsf_Gv_Ev,
    /* 0xbd */  iemOp_bsr_Gv_Ev,
    /* 0xbe */  iemOp_movsx_Gv_Eb,
    /* 0xbf */  iemOp_movsx_Gv_Ew,
    /* 0xc0 */  iemOp_xadd_Eb_Gb,
    /* 0xc1 */  iemOp_xadd_Ev_Gv,
    /* 0xc2 */  iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */  iemOp_movnti_My_Gy,
    /* 0xc4 */  iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib,
    /* 0xc5 */  iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib,
    /* 0xc6 */  iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib,
    /* 0xc7 */  iemOp_Grp9,
    /* 0xc8 */  iemOp_bswap_rAX_r8,
    /* 0xc9 */  iemOp_bswap_rCX_r9,
    /* 0xca */  iemOp_bswap_rDX_r10,
    /* 0xcb */  iemOp_bswap_rBX_r11,
    /* 0xcc */  iemOp_bswap_rSP_r12,
    /* 0xcd */  iemOp_bswap_rBP_r13,
    /* 0xce */  iemOp_bswap_rSI_r14,
    /* 0xcf */  iemOp_bswap_rDI_r15,
    /* 0xd0 */  iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps,
    /* 0xd1 */  iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq,
    /* 0xd2 */  iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq,
    /* 0xd3 */  iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq,
    /* 0xd4 */  iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq,
    /* 0xd5 */  iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq,
    /* 0xd6 */  iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq,
    /* 0xd7 */  iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq,
    /* 0xd8 */  iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq,
    /* 0xd9 */  iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq,
    /* 0xda */  iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq,
    /* 0xdb */  iemOp_pand_Pq_Qq__pand_Vdq_Wdq,
    /* 0xdc */  iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq,
    /* 0xdd */  iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq,
    /* 0xde */  iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq,
    /* 0xdf */  iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq,
    /* 0xe0 */  iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq,
    /* 0xe1 */  iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq,
    /* 0xe2 */  iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq,
    /* 0xe3 */  iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq,
    /* 0xe4 */  iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq,
    /* 0xe5 */  iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq,
    /* 0xe6 */  iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd,
    /* 0xe7 */  iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq,
    /* 0xe8 */  iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq,
    /* 0xe9 */  iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq,
    /* 0xea */  iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq,
    /* 0xeb */  iemOp_por_Pq_Qq__por_Vdq_Wdq,
    /* 0xec */  iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq,
    /* 0xed */  iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq,
    /* 0xee */  iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq,
    /* 0xef */  iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq,
    /* 0xf0 */  iemOp_lddqu_Vdq_Mdq,
    /* 0xf1 */  iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq,
    /* 0xf2 */  iemOp_psld_Pq_Qq__pslld_Vdq_Wdq,
    /* 0xf3 */  iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq,
    /* 0xf4 */  iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq,
    /* 0xf5 */  iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq,
    /* 0xf6 */  iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq,
    /* 0xf7 */  iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq,
    /* 0xf8 */  iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq,
    /* 0xf9 */  iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq,
    /* 0xfa */  iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq,
    /* 0xfb */  iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq,
    /* 0xfc */  iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq,
    /* 0xfd */  iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq,
    /* 0xfe */  iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq,
    /* 0xff */  iemOp_Invalid
};
6845
6846/** @} */
6847
6848
6849/** @name One byte opcodes.
6850 *
6851 * @{
6852 */
6853
/*
 * Opcodes 0x00..0x05: ADD in all its one-byte encodings.  Each decoder just
 * names the instruction for logging/verification and defers to the common
 * binary-operator worker matching the operand form, passing the ADD
 * implementation table.
 */

/** Opcode 0x00. */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    /* Byte add, memory/register destination, register source. */
    IEMOP_MNEMONIC("add Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
}


/** Opcode 0x01. */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    /* Word/dword/qword add, memory/register destination, register source. */
    IEMOP_MNEMONIC("add Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
}


/** Opcode 0x02. */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    /* Byte add, register destination, memory/register source. */
    IEMOP_MNEMONIC("add Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
}


/** Opcode 0x03. */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    /* Word/dword/qword add, register destination, memory/register source. */
    IEMOP_MNEMONIC("add Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
}


/** Opcode 0x04. */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    /* AL += imm8. */
    IEMOP_MNEMONIC("add al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
}


/** Opcode 0x05. */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    /* rAX += immediate (size per effective operand size). */
    IEMOP_MNEMONIC("add rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
}
6900
6901
/** Opcode 0x06. */
FNIEMOP_DEF(iemOp_push_ES)
{
    /* Push the ES selector onto the stack via the common segment-push worker. */
    IEMOP_MNEMONIC("push es");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}


/** Opcode 0x07. */
FNIEMOP_DEF(iemOp_pop_ES)
{
    /* Pop into ES.  Invalid in 64-bit mode; lock prefix not allowed.
       Deferred to C implementation since segment loading is complex. */
    IEMOP_MNEMONIC("pop es");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pIemCpu->enmEffOpSize);
}
6918
6919
/** Opcode 0x08. */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    /* Byte OR, memory/register destination.  AF is architecturally
       undefined after OR, so tell the verifier to ignore it. */
    IEMOP_MNEMONIC("or  Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
}
6927
6928
6929/** Opcode 0x09. */
6930FNIEMOP_DEF(iemOp_or_Ev_Gv)
6931{
6932 IEMOP_MNEMONIC("or Ev,Gv ");
6933 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
6934 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
6935}
6936
6937
/** Opcode 0x0a. */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    /* Byte OR, register destination.  AF undefined after OR. */
    IEMOP_MNEMONIC("or  Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
}


/** Opcode 0x0b. */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    /* Word/dword/qword OR, register destination.  AF undefined after OR. */
    IEMOP_MNEMONIC("or  Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
}


/** Opcode 0x0c. */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    /* AL |= imm8.  AF undefined after OR. */
    IEMOP_MNEMONIC("or  al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
}


/** Opcode 0x0d. */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    /* rAX |= immediate.  AF undefined after OR. */
    IEMOP_MNEMONIC("or  rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
}
6972
6973
/** Opcode 0x0e. */
FNIEMOP_DEF(iemOp_push_CS)
{
    /* Push the CS selector (valid in legacy/compat modes). */
    IEMOP_MNEMONIC("push cs");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}


/** Opcode 0x0f. */
FNIEMOP_DEF(iemOp_2byteEscape)
{
    /* Two-byte escape: fetch the next opcode byte and dispatch
       through the two-byte opcode map. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnTwoByteMap[b]);
}
6988
/*
 * Opcodes 0x10..0x15: ADC (add with carry) in all one-byte encodings,
 * each deferring to the common binary-operator worker for its operand form.
 */

/** Opcode 0x10. */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    /* Byte add-with-carry, memory/register destination. */
    IEMOP_MNEMONIC("adc Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
}


/** Opcode 0x11. */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    /* Word/dword/qword add-with-carry, memory/register destination. */
    IEMOP_MNEMONIC("adc Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
}


/** Opcode 0x12. */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    /* Byte add-with-carry, register destination. */
    IEMOP_MNEMONIC("adc Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
}


/** Opcode 0x13. */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    /* Word/dword/qword add-with-carry, register destination. */
    IEMOP_MNEMONIC("adc Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
}


/** Opcode 0x14. */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    /* AL += imm8 + CF. */
    IEMOP_MNEMONIC("adc al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
}


/** Opcode 0x15. */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    /* rAX += immediate + CF. */
    IEMOP_MNEMONIC("adc rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
}
7035
7036
/** Opcode 0x16. */
FNIEMOP_DEF(iemOp_push_SS)
{
    /* Push the SS selector onto the stack. */
    IEMOP_MNEMONIC("push ss");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}


/** Opcode 0x17. */
FNIEMOP_DEF(iemOp_pop_SS)
{
    /* Pop into SS.  Invalid in 64-bit mode; lock prefix not allowed.
       Deferred to the C implementation of segment register pops. */
    IEMOP_MNEMONIC("pop ss"); /** @todo implies instruction fusing? */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pIemCpu->enmEffOpSize);
}
7053
7054
/*
 * Opcodes 0x18..0x1d: SBB (subtract with borrow) in all one-byte encodings.
 */

/** Opcode 0x18. */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    /* Byte subtract-with-borrow, memory/register destination. */
    IEMOP_MNEMONIC("sbb Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
}


/** Opcode 0x19. */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    /* Word/dword/qword subtract-with-borrow, memory/register destination. */
    IEMOP_MNEMONIC("sbb Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
}


/** Opcode 0x1a. */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    /* Byte subtract-with-borrow, register destination. */
    IEMOP_MNEMONIC("sbb Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
}


/** Opcode 0x1b. */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    /* Word/dword/qword subtract-with-borrow, register destination. */
    IEMOP_MNEMONIC("sbb Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
}


/** Opcode 0x1c. */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    /* AL -= imm8 + CF. */
    IEMOP_MNEMONIC("sbb al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
}


/** Opcode 0x1d. */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    /* rAX -= immediate + CF. */
    IEMOP_MNEMONIC("sbb rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
}
7101
7102
/** Opcode 0x1e. */
FNIEMOP_DEF(iemOp_push_DS)
{
    /* Push the DS selector onto the stack. */
    IEMOP_MNEMONIC("push ds");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}


/** Opcode 0x1f. */
FNIEMOP_DEF(iemOp_pop_DS)
{
    /* Pop into DS.  Invalid in 64-bit mode; lock prefix not allowed. */
    IEMOP_MNEMONIC("pop ds");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pIemCpu->enmEffOpSize);
}
7119
7120
/*
 * Opcodes 0x20..0x25: AND in all one-byte encodings.  AF is architecturally
 * undefined after AND, hence the verification hint in each decoder.
 */

/** Opcode 0x20. */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    /* Byte AND, memory/register destination. */
    IEMOP_MNEMONIC("and Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
}


/** Opcode 0x21. */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    /* Word/dword/qword AND, memory/register destination. */
    IEMOP_MNEMONIC("and Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
}


/** Opcode 0x22. */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    /* Byte AND, register destination. */
    IEMOP_MNEMONIC("and Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
}


/** Opcode 0x23. */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    /* Word/dword/qword AND, register destination. */
    IEMOP_MNEMONIC("and Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
}


/** Opcode 0x24. */
FNIEMOP_DEF(iemOp_and_Al_Ib)
{
    /* AL &= imm8. */
    IEMOP_MNEMONIC("and al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
}


/** Opcode 0x25. */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    /* rAX &= immediate. */
    IEMOP_MNEMONIC("and rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
}
7173
7174
/** Opcode 0x26. */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* ES segment override prefix: record it, set the effective segment,
       then fetch and dispatch the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_ES;
    pIemCpu->iEffSeg   = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x27.  DAA -- decoder stub, implementation pending. */
FNIEMOP_STUB(iemOp_daa);
7189
7190
/*
 * Opcodes 0x28..0x2d: SUB in all one-byte encodings.
 */

/** Opcode 0x28. */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    /* Byte subtract, memory/register destination. */
    IEMOP_MNEMONIC("sub Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
}


/** Opcode 0x29. */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    /* Word/dword/qword subtract, memory/register destination. */
    IEMOP_MNEMONIC("sub Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
}


/** Opcode 0x2a. */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    /* Byte subtract, register destination. */
    IEMOP_MNEMONIC("sub Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
}


/** Opcode 0x2b. */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    /* Word/dword/qword subtract, register destination. */
    IEMOP_MNEMONIC("sub Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
}


/** Opcode 0x2c. */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    /* AL -= imm8. */
    IEMOP_MNEMONIC("sub al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
}


/** Opcode 0x2d. */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    /* rAX -= immediate. */
    IEMOP_MNEMONIC("sub rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
}
7237
7238
/** Opcode 0x2e. */
FNIEMOP_DEF(iemOp_seg_CS)
{
    /* CS segment override prefix: record it, set the effective segment,
       then fetch and dispatch the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_CS;
    pIemCpu->iEffSeg   = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x2f.  DAS -- decoder stub, implementation pending. */
FNIEMOP_STUB(iemOp_das);
7253
7254
/*
 * Opcodes 0x30..0x35: XOR in all one-byte encodings.  AF is architecturally
 * undefined after XOR, hence the verification hint in each decoder.
 */

/** Opcode 0x30. */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    /* Byte XOR, memory/register destination. */
    IEMOP_MNEMONIC("xor Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
}


/** Opcode 0x31. */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    /* Word/dword/qword XOR, memory/register destination. */
    IEMOP_MNEMONIC("xor Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
}


/** Opcode 0x32. */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    /* Byte XOR, register destination. */
    IEMOP_MNEMONIC("xor Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
}


/** Opcode 0x33. */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    /* Word/dword/qword XOR, register destination. */
    IEMOP_MNEMONIC("xor Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
}


/** Opcode 0x34. */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    /* AL ^= imm8. */
    IEMOP_MNEMONIC("xor al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
}


/** Opcode 0x35. */
FNIEMOP_DEF(iemOp_xor_eAX_Iz)
{
    /* rAX ^= immediate. */
    IEMOP_MNEMONIC("xor rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
}
7307
7308
/** Opcode 0x36. */
FNIEMOP_DEF(iemOp_seg_SS)
{
    /* SS segment override prefix: record it, set the effective segment,
       then fetch and dispatch the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_SS;
    pIemCpu->iEffSeg   = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x37.  AAA -- decoder stub, implementation pending. */
FNIEMOP_STUB(iemOp_aaa);
7323
7324
/*
 * Opcodes 0x38..0x3d: CMP in all one-byte encodings.  CMP never writes its
 * destination, so the lock prefix is rejected where checked.
 */

/** Opcode 0x38. */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    /* Byte compare, memory/register vs. register. */
    IEMOP_MNEMONIC("cmp Eb,Gb");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
}


/** Opcode 0x39. */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    /* Word/dword/qword compare, memory/register vs. register. */
    IEMOP_MNEMONIC("cmp Ev,Gv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
}


/** Opcode 0x3a. */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    /* Byte compare, register vs. memory/register. */
    IEMOP_MNEMONIC("cmp Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
}


/** Opcode 0x3b. */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    /* Word/dword/qword compare, register vs. memory/register. */
    IEMOP_MNEMONIC("cmp Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
}


/** Opcode 0x3c. */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    /* Compare AL with imm8. */
    IEMOP_MNEMONIC("cmp al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
}


/** Opcode 0x3d. */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    /* Compare rAX with immediate. */
    IEMOP_MNEMONIC("cmp rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
}
7373
7374
/** Opcode 0x3e. */
FNIEMOP_DEF(iemOp_seg_DS)
{
    /* DS segment override prefix: record it, set the effective segment,
       then fetch and dispatch the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_DS;
    pIemCpu->iEffSeg   = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x3f.  AAS -- decoder stub, implementation pending. */
FNIEMOP_STUB(iemOp_aas);
7389
/**
 * Common 'inc/dec/not/neg register' helper.
 *
 * Dispatches on the effective operand size and invokes the size-specific
 * assembly worker from @a pImpl on the referenced general register plus
 * EFLAGS.  The 32-bit case clears the high half of the 64-bit register
 * afterwards, as required for 32-bit register writes in long mode.
 *
 * @param   pImpl   Unary operator implementation table (16/32/64-bit workers).
 * @param   iReg    The general register index (already REX-adjusted by caller
 *                  where applicable).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint16_t *,  pu16Dst, 0);
            IEM_MC_ARG(uint32_t *,  pEFlags, 1);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *,  pu32Dst, 0);
            IEM_MC_ARG(uint32_t *,  pEFlags, 1);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero bits 63:32. */
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *,  pu64Dst, 0);
            IEM_MC_ARG(uint32_t *,  pEFlags, 1);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    /* Not reached with a valid enmEffOpSize; keeps the compiler happy. */
    return VINF_SUCCESS;
}
7434
7435
/*
 * Opcodes 0x40..0x47: INC r16/r32 in 16/32-bit mode.  In 64-bit mode these
 * bytes are the REX prefixes instead: each decoder records the REX bits it
 * encodes (R/X/B), then fetches and dispatches the real opcode byte.
 */

/** Opcode 0x40. */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* Plain REX: no R/X/B/W bits set. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
}


/** Opcode 0x41. */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.B: extends the ModR/M r/m, SIB base or opcode register field. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pIemCpu->uRexB     = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
}


/** Opcode 0x42. */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.X: extends the SIB index field. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
}



/** Opcode 0x43. */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.BX: both B and X extension bits. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pIemCpu->uRexB     = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
}


/** Opcode 0x44. */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.R: extends the ModR/M reg field. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pIemCpu->uRexReg   = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
}


/** Opcode 0x45. */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.RB: R and B extension bits. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pIemCpu->uRexReg   = 1 << 3;
        pIemCpu->uRexB     = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
}


/** Opcode 0x46. */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.RX: R and X extension bits. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pIemCpu->uRexReg   = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
}


/** Opcode 0x47. */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.RBX: R, B and X extension bits. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pIemCpu->uRexReg   = 1 << 3;
        pIemCpu->uRexB     = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
}
7607
7608
/*
 * Opcodes 0x48..0x4f: DEC r16/r32 in 16/32-bit mode.  In 64-bit mode these
 * bytes are the REX.W prefixes: each decoder records the encoded REX bits
 * (always W, plus R/X/B as applicable), recalculates the effective operand
 * size, then fetches and dispatches the real opcode byte.
 */

/** Opcode 0x48. */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.W: 64-bit operand size -- recalc effective operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
}


/** Opcode 0x49. */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.BW. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexB     = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
}


/** Opcode 0x4a. */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.XW. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
}


/** Opcode 0x4b. */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.BXW. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexB     = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
}


/** Opcode 0x4c. */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.RW. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg   = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
}


/** Opcode 0x4d. */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.RBW. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg   = 1 << 3;
        pIemCpu->uRexB     = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
}


/** Opcode 0x4e. */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.RXW. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg   = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
}


/** Opcode 0x4f. */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.RBXW: all extension bits plus 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg   = 1 << 3;
        pIemCpu->uRexB     = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
}
7787
7788
/**
 * Common 'push register' helper.
 *
 * In 64-bit mode the register index is REX.B-extended and the default operand
 * size is forced to 64-bit (a 0x66 prefix still selects 16-bit; there is no
 * 32-bit push in long mode).  Fetches the register value at the effective
 * operand size and pushes it.
 *
 * @param   iReg    The general register index from the opcode (low 3 bits).
 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pIemCpu->uRexB;
        pIemCpu->enmDefOpSize = IEMMODE_64BIT;
        pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
7834
7835
/*
 * Opcodes 0x50..0x57: PUSH r16/r32/r64, all via the common push helper
 * (which also applies REX.B in 64-bit mode).
 */

/** Opcode 0x50. */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC("push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}


/** Opcode 0x51. */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC("push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}


/** Opcode 0x52. */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC("push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}


/** Opcode 0x53. */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC("push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}


/** Opcode 0x54. */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC("push rSP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
}


/** Opcode 0x55. */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC("push rBP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}


/** Opcode 0x56. */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC("push rSI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}


/** Opcode 0x57. */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC("push rDI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
7898
7899
/**
 * Common 'pop register' helper.
 *
 * In 64-bit mode the register index is REX.B-extended and the default operand
 * size is forced to 64-bit (0x66 still gives 16-bit).  References the target
 * register and pops directly into it; the 32-bit case clears bits 63:32.
 *
 * NOTE(review): the IEM_MC_LOCAL invocations put the '*' with the name
 * (IEM_MC_LOCAL(uint16_t, *pu16Dst)) rather than with the type as the
 * IEM_MC_ARG lines elsewhere do -- the expansion is the same, but confirm
 * the MC-parsing tooling accepts this spelling.
 *
 * @param   iReg    The general register index from the opcode (low 3 bits).
 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pIemCpu->uRexB;
        pIemCpu->enmDefOpSize = IEMMODE_64BIT;
        pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, *pu16Dst);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_POP_U16(pu16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, *pu32Dst);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_POP_U32(pu32Dst);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, *pu64Dst);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_POP_U64(pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
7946
7947
/*
 * Opcodes 0x58..0x5b: POP r16/r32/r64 via the common pop helper
 * (which applies REX.B in 64-bit mode).
 */

/** Opcode 0x58. */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC("pop rAX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}


/** Opcode 0x59. */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC("pop rCX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}


/** Opcode 0x5a. */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC("pop rDX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}


/** Opcode 0x5b. */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC("pop rBX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
7979
/** Opcode 0x5c. */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    /*
     * POP rSP is special: the popped value must not be written through a
     * reference to rSP itself (the common helper references the register
     * and pops into it, which would interact with the stack pointer
     * update).  So pop into a local and store it afterwards.
     */
    IEMOP_MNEMONIC("pop rSP");
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        /* With REX.B set this is really pop r12 -- the common helper ORs
           in uRexB, so it's safe to take that path. */
        if (pIemCpu->uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        pIemCpu->enmDefOpSize = IEMMODE_64BIT;
        pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                           DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
    /** @todo add testcase for this instruction. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
8027
8028
/** Opcode 0x5d. */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC("pop rBP");
    /* Common worker handles operand size and stack-pointer update. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
8035
8036
/** Opcode 0x5e. */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC("pop rSI");
    /* Common worker handles operand size and stack-pointer update. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
8043
8044
/** Opcode 0x5f. */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC("pop rDI");
    /* Common worker handles operand size and stack-pointer update. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
8051
8052
/** Opcode 0x60.
 * PUSHA/PUSHAD - invalid in 64-bit mode; deferred to C implementation
 * selected by effective operand size. */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC("pusha");
    IEMOP_HLP_NO_64BIT();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
    /* 64-bit was rejected above, so only 32-bit remains here. */
    Assert(pIemCpu->enmEffOpSize == IEMMODE_32BIT);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
}
8063
8064
/** Opcode 0x61.
 * POPA/POPAD - invalid in 64-bit mode; deferred to C implementation
 * selected by effective operand size. */
FNIEMOP_DEF(iemOp_popa)
{
    IEMOP_MNEMONIC("popa");
    IEMOP_HLP_NO_64BIT();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
    /* 64-bit was rejected above, so only 32-bit remains here. */
    Assert(pIemCpu->enmEffOpSize == IEMMODE_32BIT);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
}
8075
8076
/** Opcode 0x62.
 * BOUND Gv,Ma - not implemented yet; FNIEMOP_STUB provides a placeholder
 * decoder function. */
FNIEMOP_STUB(iemOp_bound_Gv_Ma);
8079
8080
/** Opcode 0x63 - non-64-bit modes.
 * ARPL Ew,Gw - adjust the RPL field of the destination selector; invalid in
 * real and V8086 modes.  The actual RPL logic and EFLAGS.ZF update live in
 * iemAImpl_arpl; this function only decodes and routes the operands. */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC("arpl Ew,Gw");
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
        IEM_MC_ARG(uint16_t,        u16Src,     1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);

        /* Source is the reg field, destination the r/m field (MR form). */
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory - destination is mapped read-write and committed after the
           helper has (possibly) updated the selector in place. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst,          0);
        IEM_MC_ARG(uint16_t,   u16Src,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
8129
8130
/** Opcode 0x63.
 * MOVSXD Gv,Ev (64-bit mode) - sign-extend a 32-bit source into a 64-bit
 * destination register.
 * @note This is a weird one. It works like a regular move instruction if
 *       REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pIemCpu->enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */

    IEMOP_MNEMONIC("movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register to register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Value);
        /* Fetch the low 32 bits of r/m and sign-extend into the reg operand. */
        IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8172
8173
/** Opcode 0x64.
 * FS segment-override prefix: record the prefix, make FS the effective data
 * segment, then continue decoding with the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_FS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_FS;
    pIemCpu->iEffSeg = X86_SREG_FS;

    /* Dispatch the following byte through the one-byte opcode table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8184
8185
/** Opcode 0x65.
 * GS segment-override prefix: record the prefix, make GS the effective data
 * segment, then continue decoding with the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_GS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_GS;
    pIemCpu->iEffSeg = X86_SREG_GS;

    /* Dispatch the following byte through the one-byte opcode table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8196
8197
/** Opcode 0x66.
 * Operand-size override prefix: record it, recalculate the effective operand
 * size, then continue decoding with the next opcode byte. */
FNIEMOP_DEF(iemOp_op_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pIemCpu);

    /* Dispatch the following byte through the one-byte opcode table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8208
8209
/** Opcode 0x67.
 * Address-size override prefix: record it and toggle the effective address
 * mode relative to the default (16<->32 in legacy modes, 64->32 in long
 * mode), then continue decoding with the next opcode byte. */
FNIEMOP_DEF(iemOp_addr_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    switch (pIemCpu->enmDefAddrMode)
    {
        case IEMMODE_16BIT: pIemCpu->enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pIemCpu->enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pIemCpu->enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    /* Dispatch the following byte through the one-byte opcode table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
8226
8227
/** Opcode 0x68.
 * PUSH Iz - push an immediate sized by the effective operand size.  In
 * 64-bit mode the immediate is 32 bits, sign-extended to 64 before pushing
 * (default operand size is 64-bit for pushes). */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC("push Iz");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* imm32 sign-extended to 64 bits, per the long-mode encoding. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8271
8272
/** Opcode 0x69.
 * IMUL Gv,Ev,Iz - three-operand signed multiply with a full-size immediate.
 * The product is computed into a local temporary and then written to the reg
 * operand; iemAImpl_imul_two_u* does the multiply and CF/OF update (SF, ZF,
 * AF, PF are declared undefined below).  In 64-bit mode the immediate is 32
 * bits sign-extended. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC("imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG(uint16_t,        u16Src,             1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                /* cbImm=2: the imm16 follows the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG(uint32_t,        u32Src,             1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                /* cbImm=4: the imm32 follows the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG(uint64_t,        u64Src,             1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                /* cbImm=4: the (sign-extended) imm32 follows the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }
    }
    AssertFailedReturn(VERR_INTERNAL_ERROR_3);
}
8431
8432
/** Opcode 0x6a.
 * PUSH Ib - push a sign-extended byte immediate; the pushed width follows
 * the effective operand size (i8Imm is sign-extended by the int8_t type). */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC("push Ib");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0,0);
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_PUSH_U16(i8Imm);
            break;
        case IEMMODE_32BIT:
            IEM_MC_PUSH_U32(i8Imm);
            break;
        case IEMMODE_64BIT:
            IEM_MC_PUSH_U64(i8Imm);
            break;
    }
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8458
8459
/** Opcode 0x6b.
 * IMUL Gv,Ev,Ib - three-operand signed multiply with a sign-extended byte
 * immediate.  Same structure as opcode 0x69 (IMUL Gv,Ev,Iz), only the
 * immediate fetch/extension differs.  SF, ZF, AF and PF are undefined. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC("imul Gv,Ev,Ib"); /* Gv = Ev * Ib (sign-extended); */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0);
                /* The (int8_t) cast performs the Ib -> 16-bit sign extension. */
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,     0);
                IEM_MC_ARG(uint16_t,        u16Src,      1);
                IEM_MC_ARG(uint32_t *,      pEFlags,     2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                /* cbImm=1: the imm8 follows the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0);
                /* The (int8_t) cast performs the Ib -> 32-bit sign extension. */
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,     0);
                IEM_MC_ARG(uint32_t,        u32Src,      1);
                IEM_MC_ARG(uint32_t *,      pEFlags,     2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                /* cbImm=1: the imm8 follows the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0);
                /* The (int8_t) cast performs the Ib -> 64-bit sign extension. */
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,     0);
                IEM_MC_ARG(uint64_t,        u64Src,      1);
                IEM_MC_ARG(uint32_t *,      pEFlags,     2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                /* cbImm=1: the imm8 follows the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_INTERNAL_ERROR_3);
}
8612
8613
/** Opcode 0x6c.
 * INSB - input byte(s) from port DX to ES:[e/rDI]; everything is deferred to
 * C implementations selected by addressing mode.  Note that both REPZ and
 * REPNZ are treated as a plain REP here. */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep ins Yb,DX");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("ins Yb,DX");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8641
8642
/** Opcode 0x6d.
 * INSW/INSD - input word/dword(s) from port DX to ES:[e/rDI]; deferred to C
 * implementations selected by operand and addressing mode.  A 64-bit operand
 * size falls back to the 32-bit workers (there is no 64-bit INS), and both
 * REPZ and REPNZ are treated as a plain REP. */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC("rep ins Yv,DX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* fall thru - handled as 32-bit. */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("ins Yv,DX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* fall thru - handled as 32-bit. */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8702
8703
/** Opcode 0x6e.
 * OUTSB - output byte(s) from DS:[e/rSI] (segment overridable, hence
 * iEffSeg is passed along) to port DX; deferred to C implementations
 * selected by addressing mode.  Both REPZ and REPNZ act as a plain REP. */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep out DX,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pIemCpu->iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pIemCpu->iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pIemCpu->iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("out DX,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pIemCpu->iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pIemCpu->iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pIemCpu->iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8731
8732
/** Opcode 0x6f.
 * OUTSW/OUTSD - output word/dword(s) from DS:[e/rSI] (segment overridable)
 * to port DX; deferred to C implementations selected by operand and
 * addressing mode.  A 64-bit operand size falls back to the 32-bit workers
 * (there is no 64-bit OUTS), and both REPZ and REPNZ act as a plain REP. */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC("rep outs DX,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* fall thru - handled as 32-bit. */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC("outs DX,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* fall thru - handled as 32-bit. */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pIemCpu->iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pIemCpu->iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pIemCpu->iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8792
8793
/** Opcode 0x70.
 * JO Jb - jump short if OF=1. */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC("jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8811
8812
/** Opcode 0x71.
 * JNO Jb - jump short if OF=0 (condition inverted: branch arms swapped). */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC("jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8830
/** Opcode 0x72.
 * JC/JB/JNAE Jb - jump short if CF=1. */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC("jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8848
8849
/** Opcode 0x73.
 * JNC/JNB/JAE Jb - jump short if CF=0 (condition inverted: branch arms swapped). */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC("jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8867
8868
/** Opcode 0x74.
 * JE/JZ Jb - jump short if ZF=1. */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC("je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8886
8887
/** Opcode 0x75.
 * JNE/JNZ Jb - jump short if ZF=0 (condition inverted: branch arms swapped). */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC("jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8905
8906
/** Opcode 0x76.
 * JBE/JNA Jb - jump short if CF=1 or ZF=1. */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC("jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8924
8925
/** Opcode 0x77.
 * JNBE/JA Jb - jump short if CF=0 and ZF=0 (condition inverted: branch arms swapped). */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC("jnbe/ja Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8943
8944
/** Opcode 0x78.
 * JS Jb - jump short if SF=1. */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC("js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8962
8963
/** Opcode 0x79.
 * JNS Jb - jump short if SF=0 (condition inverted: branch arms swapped). */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC("jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8981
8982
/** Opcode 0x7a.
 * JP/JPE Jb - jump short if PF=1. */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC("jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9000
9001
/** Opcode 0x7b.
 * JNP/JPO Jb - jump short if PF=0 (condition inverted: branch arms swapped). */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC("jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9019
9020
/** Opcode 0x7c.
 * JL/JNGE Jb - jump short if SF != OF. */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC("jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9038
9039
/** Opcode 0x7d.
 * JNL/JGE Jb - jump short if SF == OF (condition inverted: branch arms swapped). */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC("jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9057
9058
/** Opcode 0x7e.
 * JLE/JNG Jb - jump short if ZF=1 or SF != OF. */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC("jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9076
9077
/** Opcode 0x7f - jnle/jg Jb: jump short if greater (ZF clear and SF == OF). */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC("jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit relative displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near branches default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    /* Same test as 0x7e with the arms swapped: branch taken when !ZF && SF == OF. */
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);   /* greater: take the branch */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9095
9096
/**
 * Opcode 0x80 - Group 1: add/or/adc/sbb/and/sub/xor/cmp Eb,Ib.
 *
 * The ModRM reg field selects the operation; the implementation table
 * g_apIemImplGrp1 is indexed by it.  The destination is an 8-bit
 * register or memory operand, the source an 8-bit immediate.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Mnemonic lookup: each name occupies 4 bytes (NUL padded), indexed by reg*4. */
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Eb,Ib");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK is only valid with a memory destination */
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        uint32_t fAccess;
        if (pImpl->pfnLockedU8)
            fAccess = IEM_ACCESS_DATA_RW;
        else
        { /* CMP - no locked variant, destination is only read */
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* Displacement bytes precede the immediate in the encoding, so the
           effective address (3rd param = 1 immediate byte still to come) must
           be calculated before fetching the immediate. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);

        IEM_MC_MEM_MAP(pu8Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
9155
9156
/**
 * Opcode 0x81 - Group 1: add/or/adc/sbb/and/sub/xor/cmp Ev,Iz.
 *
 * Word/dword/qword destination with a word/dword immediate (sign-extended
 * dword for the 64-bit operand size).  One switch case per effective
 * operand size, each with a register and a memory path.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Mnemonic lookup: each name occupies 4 bytes (NUL padded), indexed by reg*4. */
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Ev,Iz");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK is only valid with a memory destination */
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU16)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                { /* CMP, TEST - no locked variant, destination is only read */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Address calc first (2 immediate bytes follow the ModRM bytes). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                /* 32-bit GPR writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU32)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                { /* CMP, TEST - no locked variant, destination is only read */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Address calc first (4 immediate bytes follow the ModRM bytes). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                /* Iz is at most 32 bits; it is sign-extended to 64 bits here. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU64)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                { /* CMP - no locked variant, destination is only read */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Address calc first (4 immediate bytes even for 64-bit ops). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }
    }
    return VINF_SUCCESS;
}
9331
9332
/**
 * Opcode 0x82 - alias of Group 1 Eb,Ib (0x80); invalid in 64-bit mode,
 * otherwise simply forwards to the 0x80 decoder.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
9339
9340
/**
 * Opcode 0x83 - Group 1: add/or/adc/sbb/and/sub/xor/cmp Ev,Ib.
 *
 * Word/dword/qword destination with a sign-extended 8-bit immediate
 * (note the (int8_t) casts below performing the sign extension).
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Mnemonic lookup: each name occupies 4 bytes (NUL padded), indexed by reg*4. */
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Ev,Ib");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register target
         */
        IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK is only valid with a memory destination */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); /* sign-extend imm8 */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1); /* sign-extend imm8 */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                /* 32-bit GPR writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1); /* sign-extend imm8 */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    else
    {
        /*
         * Memory target.
         */
        /* NOTE(review): pfnLockedU16 is used as a proxy for all sizes here -
           presumably the locked variants are either all present or all NULL
           (only CMP lacks them) - confirm against g_apIemImplGrp1. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else
        { /* CMP */
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Address calc first (1 immediate byte follows the ModRM bytes). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); /* sign-extend imm8 */
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); /* sign-extend imm8 */
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); /* sign-extend imm8 */
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    return VINF_SUCCESS;
}
9501
9502
/**
 * Opcode 0x84 - test Eb,Gb: AND without storing the result, flags only.
 * Reuses the generic byte rm,r8 binary-operator worker with the TEST
 * implementation table (whose locked pointers are NULL, so LOCK faults).
 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC("test Eb,Gb");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined after TEST */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
}
9511
9512
/**
 * Opcode 0x85 - test Ev,Gv: AND without storing the result, flags only.
 * Word/dword/qword variant of 0x84, via the generic rm,rv worker.
 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC("test Ev,Gv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined after TEST */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
}
9521
9522
/**
 * Opcode 0x86 - xchg Eb,Gb: exchange byte register with register/memory.
 *
 * The register form is a plain double fetch + double store; the memory
 * form maps the byte read-write and calls the assembly xchg helper
 * (memory-destination XCHG is implicitly locked on real hardware, hence
 * no lock-prefix rejection in that path - NOTE(review): confirm that is
 * the intent here).
 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC("xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        /* Fetch both, then store each into the other's register. */
        IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
/** @todo the register must be committed separately! */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Mem, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
9570
9571
/**
 * Opcode 0x87 - xchg Ev,Gv: exchange word/dword/qword register with
 * register/memory.  One case per effective operand size in both the
 * register and memory paths.
 */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC("xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                /* Fetch both, then store each into the other's register. */
                IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* expands to the default: case */
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
/** @todo the register must be committed separately! */
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint16_t *, pu16Mem, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint32_t *, pu32Mem, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

                /* The helper wrote through the register reference, so the
                   upper dword must be cleared explicitly here. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pu64Mem, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* expands to the default: case */
        }
    }
}
9693
9694
/** Opcode 0x88 - mov Eb,Gb: store byte register into register/memory. */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC("mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* reg -> reg copy */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
9733
9734
/**
 * Opcode 0x89 - mov Ev,Gv: store word/dword/qword register into
 * register/memory.  One case per effective operand size in each path.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC("mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
9821
9822
/** Opcode 0x8a - mov Gb,Eb: load byte register from register/memory. */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC("mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* reg -> reg copy (direction reversed relative to 0x88) */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U8(u8Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
9859
9860
/**
 * Opcode 0x8b - mov Gv,Ev: load word/dword/qword register from
 * register/memory.  One case per effective operand size in each path.
 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC("mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
9947
9948
/**
 * Opcode 0x63 - mode-dependent dispatcher (defined here because it needs
 * iemOp_mov_Gv_Ev above):
 *  - outside 64-bit mode: ARPL Ew,Gw;
 *  - 64-bit mode, 64-bit operand size: MOVSXD Gv,Ev;
 *  - 64-bit mode, 16/32-bit operand size: behaves as plain MOV Gv,Ev.
 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
{
    if (pIemCpu->enmCpuMode != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
    if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
    return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
}
9958
9959
/**
 * Opcode 0x8c - mov Ev,Sw: store a segment selector into register/memory.
 *
 * Register destinations honour the operand size (upper bits zero-extended);
 * memory destinations are always a 16-bit store regardless of operand size.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC("mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg); /* zero-extended fetch */
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg); /* zero-extended fetch */
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're saving the register to memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10032
10033
10034
10035
/**
 * Opcode 0x8d - lea Gv,M: load effective address.
 *
 * The ModRM must encode a memory operand (register form is #UD).  The
 * computed effective address is truncated to the operand size before
 * being stored in the destination register.
 */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC("lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc); /* truncate to 16 bits */
            IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc); /* truncate to 32 bits */
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_INTERNAL_ERROR_5); /* unreachable: all op sizes handled above */
}
10080
10081
/** Opcode 0x8e. */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    /*
     * MOV Sw,Ev: load a segment register from a 16-bit register or memory
     * operand.  Loading a segment register has descriptor-table side effects
     * and can fault, so the actual work is deferred to iemCImpl_load_SReg.
     */
    IEMOP_MNEMONIC("mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.  Loading CS this way is
     * architecturally invalid (#UD), as are encodings beyond GS.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t, u16Value, 1);
        IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t, u16Value, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10135
10136
/** Opcode 0x8f /0. */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignoring it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC("pop Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /* Register access is relatively easy and can share code. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
    /* First pass: remember the opcode position so the R/M+SIB bytes can be
       re-decoded below after rSP has been provisionally adjusted. */
    uint8_t const   offOpcodeSaved = pIemCpu->offOpcode;
    RTGCPTR         GCPtrEff;
    VBOXSTRICTRC    rcStrict;
    rcStrict = iemOpHlpCalcRmEffAddr(pIemCpu, bRm, 0, &GCPtrEff);
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    pIemCpu->offOpcode = offOpcodeSaved;

    /* Second pass: bump rSP by the operand size, redo the effective address
       calculation (so rSP-relative addressing sees the post-pop value), then
       restore rSP so the commit below starts from a clean state. */
    PCPUMCTX        pCtx = pIemCpu->CTX_SUFF(pCtx);
    uint64_t const  RspSaved = pCtx->rsp;
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT: iemRegAddToRsp(pIemCpu, pCtx, 2); break;
        case IEMMODE_32BIT: iemRegAddToRsp(pIemCpu, pCtx, 4); break;
        case IEMMODE_64BIT: iemRegAddToRsp(pIemCpu, pCtx, 8); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    rcStrict = iemOpHlpCalcRmEffAddr(pIemCpu, bRm, 0, &GCPtrEff);
    Assert(rcStrict == VINF_SUCCESS);
    pCtx->rsp = RspSaved;

    /* Perform the operation - this should be CImpl. */
    /* Pop into a temporary and store it at the effective address; rSP is only
       committed (via TmpRsp) after both operations succeed. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pCtx->rsp;
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pIemCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pIemCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pIemCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        pCtx->rsp = TmpRsp.u;
        iemRegUpdateRipAndClearRF(pIemCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}
10238
10239
/** Opcode 0x8f. */
FNIEMOP_DEF(iemOp_Grp1A)
{
    /* Group 1A dispatcher: the only defined member is /0 (pop Ev); all other
       reg-field values raise #UD. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only pop Ev in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
}
10248
10249
10250/**
10251 * Common 'xchg reg,rAX' helper.
10252 */
10253FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
10254{
10255 IEMOP_HLP_NO_LOCK_PREFIX();
10256
10257 iReg |= pIemCpu->uRexB;
10258 switch (pIemCpu->enmEffOpSize)
10259 {
10260 case IEMMODE_16BIT:
10261 IEM_MC_BEGIN(0, 2);
10262 IEM_MC_LOCAL(uint16_t, u16Tmp1);
10263 IEM_MC_LOCAL(uint16_t, u16Tmp2);
10264 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
10265 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
10266 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
10267 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
10268 IEM_MC_ADVANCE_RIP();
10269 IEM_MC_END();
10270 return VINF_SUCCESS;
10271
10272 case IEMMODE_32BIT:
10273 IEM_MC_BEGIN(0, 2);
10274 IEM_MC_LOCAL(uint32_t, u32Tmp1);
10275 IEM_MC_LOCAL(uint32_t, u32Tmp2);
10276 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
10277 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
10278 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
10279 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
10280 IEM_MC_ADVANCE_RIP();
10281 IEM_MC_END();
10282 return VINF_SUCCESS;
10283
10284 case IEMMODE_64BIT:
10285 IEM_MC_BEGIN(0, 2);
10286 IEM_MC_LOCAL(uint64_t, u64Tmp1);
10287 IEM_MC_LOCAL(uint64_t, u64Tmp2);
10288 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
10289 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
10290 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
10291 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
10292 IEM_MC_ADVANCE_RIP();
10293 IEM_MC_END();
10294 return VINF_SUCCESS;
10295
10296 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10297 }
10298}
10299
10300
10301/** Opcode 0x90. */
10302FNIEMOP_DEF(iemOp_nop)
10303{
10304 /* R8/R8D and RAX/EAX can be exchanged. */
10305 if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_B)
10306 {
10307 IEMOP_MNEMONIC("xchg r8,rAX");
10308 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
10309 }
10310
10311 if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
10312 IEMOP_MNEMONIC("pause");
10313 else
10314 IEMOP_MNEMONIC("nop");
10315 IEM_MC_BEGIN(0, 0);
10316 IEM_MC_ADVANCE_RIP();
10317 IEM_MC_END();
10318 return VINF_SUCCESS;
10319}
10320
10321
/** Opcode 0x91. */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    /* xchg rCX,rAX - delegates to the common rAX exchange helper. */
    IEMOP_MNEMONIC("xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
10328
10329
/** Opcode 0x92. */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    /* xchg rDX,rAX - delegates to the common rAX exchange helper. */
    IEMOP_MNEMONIC("xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
10336
10337
/** Opcode 0x93. */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    /* xchg rBX,rAX - delegates to the common rAX exchange helper. */
    IEMOP_MNEMONIC("xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
10344
10345
10346/** Opcode 0x94. */
10347FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
10348{
10349 IEMOP_MNEMONIC("xchg rSX,rAX");
10350 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
10351}
10352
10353
/** Opcode 0x95. */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    /* xchg rBP,rAX - delegates to the common rAX exchange helper. */
    IEMOP_MNEMONIC("xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
10360
10361
/** Opcode 0x96. */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    /* xchg rSI,rAX - delegates to the common rAX exchange helper. */
    IEMOP_MNEMONIC("xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
10368
10369
/** Opcode 0x97. */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    /* xchg rDI,rAX - delegates to the common rAX exchange helper. */
    IEMOP_MNEMONIC("xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
10376
10377
/** Opcode 0x98. */
FNIEMOP_DEF(iemOp_cbw)
{
    /*
     * CBW/CWDE/CDQE: sign extend the lower half of rAX into the full
     * operand-size register.  Implemented by testing the sign bit of the
     * source half and then ORing in ones or ANDing in zeros for the top half.
     */
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC("cbw");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) { /* sign bit of AL */
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC("cwde");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) { /* sign bit of AX */
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC("cdqe");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) { /* sign bit of EAX */
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10423
10424
/** Opcode 0x99. */
FNIEMOP_DEF(iemOp_cwd)
{
    /*
     * CWD/CDQ/CQO: sign extend rAX into rDX:rAX by filling rDX with the
     * replicated sign bit of the operand-sized accumulator.
     */
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC("cwd");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) { /* sign bit of AX */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC("cdq");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) { /* sign bit of EAX */
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC("cqo");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) { /* sign bit of RAX */
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10470
10471
/** Opcode 0x9a. */
FNIEMOP_DEF(iemOp_call_Ap)
{
    /*
     * CALL Ap (far call with immediate seg:off pointer).  Invalid in 64-bit
     * mode.  The offset is 16 or 32 bits depending on the operand size; the
     * heavy lifting (stack pushes, mode/privilege checks) is done by
     * iemCImpl_callf.
     */
    IEMOP_MNEMONIC("call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pIemCpu->enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pIemCpu->enmEffOpSize);
}
10488
10489
/** Opcode 0x9b. (aka fwait) */
FNIEMOP_DEF(iemOp_wait)
{
    /* WAIT/FWAIT: raises #NM if CR0.TS/MP demand it and any pending x87
       exception; otherwise a no-op that just advances RIP. */
    IEMOP_MNEMONIC("wait");
    IEMOP_HLP_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10503
10504
/** Opcode 0x9c. */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    /* PUSHF/PUSHFD/PUSHFQ - defers to the C implementation which handles the
       mode/IOPL specific EFLAGS masking.  Default op size is 64-bit in long
       mode. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pIemCpu->enmEffOpSize);
}
10512
10513
/** Opcode 0x9d. */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    /* POPF/POPFD/POPFQ - defers to the C implementation which handles the
       IOPL/VM86 sensitive flag updates.  Default op size is 64-bit in long
       mode. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pIemCpu->enmEffOpSize);
}
10521
10522
/** Opcode 0x9e. */
FNIEMOP_DEF(iemOp_sahf)
{
    /*
     * SAHF: store AH into the low byte of EFLAGS (SF/ZF/AF/PF/CF).  In
     * 64-bit mode this requires the LAHF/SAHF CPUID feature bit.
     */
    IEMOP_MNEMONIC("sahf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        && !IEM_IS_AMD_CPUID_FEATURE_PRESENT_ECX(X86_CPUID_EXT_FEATURE_ECX_LAHF_SAHF))
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    /* Only the five status flags come from AH... */
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    /* ...the upper EFLAGS bits are preserved... */
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
    /* ...and the reserved bit 1 is always forced to one. */
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10545
10546
/** Opcode 0x9f. */
FNIEMOP_DEF(iemOp_lahf)
{
    /*
     * LAHF: load the low byte of EFLAGS into AH.  In 64-bit mode this
     * requires the LAHF/SAHF CPUID feature bit (same as SAHF above).
     */
    IEMOP_MNEMONIC("lahf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        && !IEM_IS_AMD_CPUID_FEATURE_PRESENT_ECX(X86_CPUID_EXT_FEATURE_ECX_LAHF_SAHF))
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10563
10564
10565/**
10566 * Macro used by iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
10567 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend of lock
10568 * prefixes. Will return on failures.
10569 * @param a_GCPtrMemOff The variable to store the offset in.
10570 */
10571#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
10572 do \
10573 { \
10574 switch (pIemCpu->enmEffAddrMode) \
10575 { \
10576 case IEMMODE_16BIT: \
10577 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
10578 break; \
10579 case IEMMODE_32BIT: \
10580 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
10581 break; \
10582 case IEMMODE_64BIT: \
10583 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
10584 break; \
10585 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
10586 } \
10587 IEMOP_HLP_NO_LOCK_PREFIX(); \
10588 } while (0)
10589
10590/** Opcode 0xa0. */
10591FNIEMOP_DEF(iemOp_mov_Al_Ob)
10592{
10593 /*
10594 * Get the offset and fend of lock prefixes.
10595 */
10596 RTGCPTR GCPtrMemOff;
10597 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
10598
10599 /*
10600 * Fetch AL.
10601 */
10602 IEM_MC_BEGIN(0,1);
10603 IEM_MC_LOCAL(uint8_t, u8Tmp);
10604 IEM_MC_FETCH_MEM_U8(u8Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
10605 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
10606 IEM_MC_ADVANCE_RIP();
10607 IEM_MC_END();
10608 return VINF_SUCCESS;
10609}
10610
10611
/** Opcode 0xa1. */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * MOV rAX,Ov: load AX/EAX/RAX from the moffs memory operand.
     *
     * Get the offset and fend of lock prefixes.
     */
    IEMOP_MNEMONIC("mov rAX,Ov");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch rAX.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10657
10658
10659/** Opcode 0xa2. */
10660FNIEMOP_DEF(iemOp_mov_Ob_AL)
10661{
10662 /*
10663 * Get the offset and fend of lock prefixes.
10664 */
10665 RTGCPTR GCPtrMemOff;
10666 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
10667
10668 /*
10669 * Store AL.
10670 */
10671 IEM_MC_BEGIN(0,1);
10672 IEM_MC_LOCAL(uint8_t, u8Tmp);
10673 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
10674 IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrMemOff, u8Tmp);
10675 IEM_MC_ADVANCE_RIP();
10676 IEM_MC_END();
10677 return VINF_SUCCESS;
10678}
10679
10680
10681/** Opcode 0xa3. */
10682FNIEMOP_DEF(iemOp_mov_Ov_rAX)
10683{
10684 /*
10685 * Get the offset and fend of lock prefixes.
10686 */
10687 RTGCPTR GCPtrMemOff;
10688 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
10689
10690 /*
10691 * Store rAX.
10692 */
10693 switch (pIemCpu->enmEffOpSize)
10694 {
10695 case IEMMODE_16BIT:
10696 IEM_MC_BEGIN(0,1);
10697 IEM_MC_LOCAL(uint16_t, u16Tmp);
10698 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
10699 IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrMemOff, u16Tmp);
10700 IEM_MC_ADVANCE_RIP();
10701 IEM_MC_END();
10702 return VINF_SUCCESS;
10703
10704 case IEMMODE_32BIT:
10705 IEM_MC_BEGIN(0,1);
10706 IEM_MC_LOCAL(uint32_t, u32Tmp);
10707 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
10708 IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrMemOff, u32Tmp);
10709 IEM_MC_ADVANCE_RIP();
10710 IEM_MC_END();
10711 return VINF_SUCCESS;
10712
10713 case IEMMODE_64BIT:
10714 IEM_MC_BEGIN(0,1);
10715 IEM_MC_LOCAL(uint64_t, u64Tmp);
10716 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
10717 IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrMemOff, u64Tmp);
10718 IEM_MC_ADVANCE_RIP();
10719 IEM_MC_END();
10720 return VINF_SUCCESS;
10721
10722 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10723 }
10724}
10725
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 * Emits one MOVS case: load from DS(or segment override):rSI, store to
 * ES:rDI, then advance or retreat both index registers by the element size
 * according to EFLAGS.DF. */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
10744
/** Opcode 0xa4. */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    /* MOVSB: byte string move.  REP forms go to the C implementation; the
       single-shot form shares IEM_MOVS_CASE with movs[wdq]. */
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * (F2 and F3 behave the same for MOVS - both mean repeat rCX times.)
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep movsb Xb,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("movsb Xb,Yb");

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
10778
10779
/** Opcode 0xa5. */
FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
{
    /* MOVSW/MOVSD/MOVSQ: word/dword/qword string move.  REP forms go to the
       C implementation, selected by both operand and address size; the
       single-shot forms share IEM_MOVS_CASE with movsb above. */
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep movs Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                /* Note: no break needed - every inner case returns, so the
                   fall-through into IEMMODE_64BIT is unreachable. */
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3); /* 64-bit op + 16-bit addr cannot be encoded */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("movs Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with movsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_4); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
10862
10863#undef IEM_MOVS_CASE
10864
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 * Emits one CMPS case: load value 1 from DS(or override):rSI and value 2
 * from ES:rDI, run the cmp assembly helper to set EFLAGS (operands are not
 * written back), then advance or retreat both index registers by the
 * element size according to EFLAGS.DF. */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 3); \
        IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
        IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
        IEM_MC_REF_LOCAL(puValue1, uValue1); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \
10892/** Opcode 0xa6. */
10893FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
10894{
10895 IEMOP_HLP_NO_LOCK_PREFIX();
10896
10897 /*
10898 * Use the C implementation if a repeat prefix is encountered.
10899 */
10900 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
10901 {
10902 IEMOP_MNEMONIC("repe cmps Xb,Yb");
10903 switch (pIemCpu->enmEffAddrMode)
10904 {
10905 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pIemCpu->iEffSeg);
10906 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pIemCpu->iEffSeg);
10907 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pIemCpu->iEffSeg);
10908 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10909 }
10910 }
10911 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
10912 {
10913 IEMOP_MNEMONIC("repe cmps Xb,Yb");
10914 switch (pIemCpu->enmEffAddrMode)
10915 {
10916 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pIemCpu->iEffSeg);
10917 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pIemCpu->iEffSeg);
10918 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pIemCpu->iEffSeg);
10919 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10920 }
10921 }
10922 IEMOP_MNEMONIC("cmps Xb,Yb");
10923
10924 /*
10925 * Sharing case implementation with cmps[wdq] below.
10926 */
10927 switch (pIemCpu->enmEffAddrMode)
10928 {
10929 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
10930 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
10931 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
10932 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10933 }
10934 return VINF_SUCCESS;
10935
10936}
10937
10938
/** Opcode 0xa7. */
FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
{
    /* CMPSW/CMPSD/CMPSQ: word/dword/qword string compare.  REPE/REPNE forms
       go to the C implementations, selected by both operand and address
       size; the single-shot forms share IEM_CMPS_CASE with cmpsb above. */
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe cmps Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                /* Note: no break needed - every inner case returns, so the
                   fall-through into IEMMODE_64BIT is unreachable. */
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3); /* 64-bit op + 16-bit addr cannot be encoded */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne cmps Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                /* Note: no break needed - every inner case returns, so the
                   fall-through into IEMMODE_64BIT is unreachable. */
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3); /* 64-bit op + 16-bit addr cannot be encoded */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    IEMOP_MNEMONIC("cmps Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with cmpsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_4); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}
11057
11058#undef IEM_CMPS_CASE
11059
/** Opcode 0xa8 - TEST AL,Ib. */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC("test al,Ib");
    /* TEST leaves AF undefined; tell the verifier not to compare that bit. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    /* Reuses the common AL,Ib binary-operator decoder; only the impl table differs. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
}
11067
11068
/** Opcode 0xa9 - TEST rAX,Iz (operand size selects AX/EAX/RAX and imm16/imm32). */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC("test rAX,Iz");
    /* TEST leaves AF undefined; tell the verifier not to compare that bit. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    /* Reuses the common rAX,Iz binary-operator decoder; only the impl table differs. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
}
11076
11077
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Emits the non-repeating STOS microcode for one operand/address size combo:
 * stores the low ValBits of rAX at ES:xDI, then advances or retreats xDI by
 * ValBits/8 depending on EFLAGS.DF. */
#define IEM_STOS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END()
11093
/** Opcode 0xaa - STOSB: store AL at ES:[xDI], updating xDI per EFLAGS.DF. */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * (Both F3/REP and F2 are routed here; this code treats them the same.)
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep stos Yb,al");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("stos Yb,al");

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11127
11128
11129/** Opcode 0xab. */
11130FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
11131{
11132 IEMOP_HLP_NO_LOCK_PREFIX();
11133
11134 /*
11135 * Use the C implementation if a repeat prefix is encountered.
11136 */
11137 if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
11138 {
11139 IEMOP_MNEMONIC("rep stos Yv,rAX");
11140 switch (pIemCpu->enmEffOpSize)
11141 {
11142 case IEMMODE_16BIT:
11143 switch (pIemCpu->enmEffAddrMode)
11144 {
11145 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
11146 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
11147 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
11148 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11149 }
11150 break;
11151 case IEMMODE_32BIT:
11152 switch (pIemCpu->enmEffAddrMode)
11153 {
11154 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
11155 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
11156 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
11157 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11158 }
11159 case IEMMODE_64BIT:
11160 switch (pIemCpu->enmEffAddrMode)
11161 {
11162 case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3);
11163 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
11164 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
11165 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11166 }
11167 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11168 }
11169 }
11170 IEMOP_MNEMONIC("stos Yv,rAX");
11171
11172 /*
11173 * Annoying double switch here.
11174 * Using ugly macro for implementing the cases, sharing it with stosb.
11175 */
11176 switch (pIemCpu->enmEffOpSize)
11177 {
11178 case IEMMODE_16BIT:
11179 switch (pIemCpu->enmEffAddrMode)
11180 {
11181 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
11182 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
11183 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
11184 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11185 }
11186 break;
11187
11188 case IEMMODE_32BIT:
11189 switch (pIemCpu->enmEffAddrMode)
11190 {
11191 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
11192 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
11193 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
11194 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11195 }
11196 break;
11197
11198 case IEMMODE_64BIT:
11199 switch (pIemCpu->enmEffAddrMode)
11200 {
11201 case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_4); /* cannot be encoded */ break;
11202 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
11203 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
11204 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11205 }
11206 break;
11207 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11208 }
11209 return VINF_SUCCESS;
11210}
11211
11212#undef IEM_STOS_CASE
11213
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Emits the non-repeating LODS microcode for one operand/address size combo:
 * loads ValBits from DS:[xSI] (iEffSeg honours segment overrides) into the
 * low ValBits of rAX, then advances or retreats xSI by ValBits/8 depending
 * on EFLAGS.DF. */
#define IEM_LODS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
11229
/** Opcode 0xac - LODSB: load AL from [xSI] (default DS, override honoured),
 *  updating xSI per EFLAGS.DF. */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * (Both F3/REP and F2 are routed here; this code treats them the same.)
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep lodsb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("lodsb al,Xb");

    /*
     * Sharing case implementation with lods[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11263
11264
11265/** Opcode 0xad. */
11266FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
11267{
11268 IEMOP_HLP_NO_LOCK_PREFIX();
11269
11270 /*
11271 * Use the C implementation if a repeat prefix is encountered.
11272 */
11273 if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
11274 {
11275 IEMOP_MNEMONIC("rep lods rAX,Xv");
11276 switch (pIemCpu->enmEffOpSize)
11277 {
11278 case IEMMODE_16BIT:
11279 switch (pIemCpu->enmEffAddrMode)
11280 {
11281 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pIemCpu->iEffSeg);
11282 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pIemCpu->iEffSeg);
11283 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pIemCpu->iEffSeg);
11284 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11285 }
11286 break;
11287 case IEMMODE_32BIT:
11288 switch (pIemCpu->enmEffAddrMode)
11289 {
11290 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pIemCpu->iEffSeg);
11291 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pIemCpu->iEffSeg);
11292 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pIemCpu->iEffSeg);
11293 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11294 }
11295 case IEMMODE_64BIT:
11296 switch (pIemCpu->enmEffAddrMode)
11297 {
11298 case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3);
11299 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pIemCpu->iEffSeg);
11300 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pIemCpu->iEffSeg);
11301 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11302 }
11303 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11304 }
11305 }
11306 IEMOP_MNEMONIC("lods rAX,Xv");
11307
11308 /*
11309 * Annoying double switch here.
11310 * Using ugly macro for implementing the cases, sharing it with lodsb.
11311 */
11312 switch (pIemCpu->enmEffOpSize)
11313 {
11314 case IEMMODE_16BIT:
11315 switch (pIemCpu->enmEffAddrMode)
11316 {
11317 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
11318 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
11319 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
11320 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11321 }
11322 break;
11323
11324 case IEMMODE_32BIT:
11325 switch (pIemCpu->enmEffAddrMode)
11326 {
11327 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
11328 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
11329 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
11330 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11331 }
11332 break;
11333
11334 case IEMMODE_64BIT:
11335 switch (pIemCpu->enmEffAddrMode)
11336 {
11337 case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_4); /* cannot be encoded */ break;
11338 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
11339 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
11340 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11341 }
11342 break;
11343 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11344 }
11345 return VINF_SUCCESS;
11346}
11347
11348#undef IEM_LODS_CASE
11349
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Emits the non-repeating SCAS microcode for one operand/address size combo:
 * compares the low ValBits of rAX against the value at ES:[xDI] (setting
 * EFLAGS via the cmp implementation; rAX is not modified), then advances or
 * retreats xDI by ValBits/8 depending on EFLAGS.DF. */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 2); \
        IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
        IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
        IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
11371
/** Opcode 0xae - SCASB: compare AL with the byte at ES:[xDI],
 *  updating xDI per EFLAGS.DF. REPE/REPNE select distinct C workers. */
FNIEMOP_DEF(iemOp_scasb_AL_Xb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * (Unlike STOS/LODS, REPE and REPNE have different semantics here.)
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe scasb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne scasb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("scasb al,Xb");

    /*
     * Sharing case implementation with scas[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11416
11417
11418/** Opcode 0xaf. */
11419FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
11420{
11421 IEMOP_HLP_NO_LOCK_PREFIX();
11422
11423 /*
11424 * Use the C implementation if a repeat prefix is encountered.
11425 */
11426 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
11427 {
11428 IEMOP_MNEMONIC("repe scas rAX,Xv");
11429 switch (pIemCpu->enmEffOpSize)
11430 {
11431 case IEMMODE_16BIT:
11432 switch (pIemCpu->enmEffAddrMode)
11433 {
11434 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
11435 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
11436 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
11437 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11438 }
11439 break;
11440 case IEMMODE_32BIT:
11441 switch (pIemCpu->enmEffAddrMode)
11442 {
11443 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
11444 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
11445 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
11446 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11447 }
11448 case IEMMODE_64BIT:
11449 switch (pIemCpu->enmEffAddrMode)
11450 {
11451 case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
11452 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
11453 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
11454 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11455 }
11456 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11457 }
11458 }
11459 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
11460 {
11461 IEMOP_MNEMONIC("repne scas rAX,Xv");
11462 switch (pIemCpu->enmEffOpSize)
11463 {
11464 case IEMMODE_16BIT:
11465 switch (pIemCpu->enmEffAddrMode)
11466 {
11467 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
11468 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
11469 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
11470 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11471 }
11472 break;
11473 case IEMMODE_32BIT:
11474 switch (pIemCpu->enmEffAddrMode)
11475 {
11476 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
11477 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
11478 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
11479 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11480 }
11481 case IEMMODE_64BIT:
11482 switch (pIemCpu->enmEffAddrMode)
11483 {
11484 case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3);
11485 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
11486 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
11487 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11488 }
11489 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11490 }
11491 }
11492 IEMOP_MNEMONIC("scas rAX,Xv");
11493
11494 /*
11495 * Annoying double switch here.
11496 * Using ugly macro for implementing the cases, sharing it with scasb.
11497 */
11498 switch (pIemCpu->enmEffOpSize)
11499 {
11500 case IEMMODE_16BIT:
11501 switch (pIemCpu->enmEffAddrMode)
11502 {
11503 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
11504 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
11505 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
11506 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11507 }
11508 break;
11509
11510 case IEMMODE_32BIT:
11511 switch (pIemCpu->enmEffAddrMode)
11512 {
11513 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
11514 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
11515 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
11516 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11517 }
11518 break;
11519
11520 case IEMMODE_64BIT:
11521 switch (pIemCpu->enmEffAddrMode)
11522 {
11523 case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_4); /* cannot be encoded */ break;
11524 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
11525 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
11526 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11527 }
11528 break;
11529 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11530 }
11531 return VINF_SUCCESS;
11532}
11533
11534#undef IEM_SCAS_CASE
11535
11536/**
11537 * Common 'mov r8, imm8' helper.
11538 */
11539FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
11540{
11541 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
11542 IEMOP_HLP_NO_LOCK_PREFIX();
11543
11544 IEM_MC_BEGIN(0, 1);
11545 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
11546 IEM_MC_STORE_GREG_U8(iReg, u8Value);
11547 IEM_MC_ADVANCE_RIP();
11548 IEM_MC_END();
11549
11550 return VINF_SUCCESS;
11551}
11552
11553
/** Opcode 0xb0 - MOV AL,Ib (R8B with REX.B). */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    IEMOP_MNEMONIC("mov AL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pIemCpu->uRexB);
}
11560
11561
/** Opcode 0xb1 - MOV CL,Ib (R9B with REX.B).
 * @note Function name lacks the mov_ prefix its siblings use; it is
 *       referenced from the opcode table, so renaming is out of scope here. */
FNIEMOP_DEF(iemOp_CL_Ib)
{
    IEMOP_MNEMONIC("mov CL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pIemCpu->uRexB);
}
11568
11569
/** Opcode 0xb2 - MOV DL,Ib (R10B with REX.B). */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    IEMOP_MNEMONIC("mov DL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pIemCpu->uRexB);
}
11576
11577
/** Opcode 0xb3 - MOV BL,Ib (R11B with REX.B). */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    IEMOP_MNEMONIC("mov BL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pIemCpu->uRexB);
}
11584
11585
/** Opcode 0xb4 - MOV AH,Ib.
 * Register index 4 means AH without a REX prefix, SPL/R12B with one —
 * presumably resolved inside IEM_MC_STORE_GREG_U8; confirm there. */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    IEMOP_MNEMONIC("mov AH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pIemCpu->uRexB);
}
11592
11593
/** Opcode 0xb5 - MOV CH,Ib.
 * Register index 5 means CH without a REX prefix, BPL/R13B with one —
 * presumably resolved inside IEM_MC_STORE_GREG_U8; confirm there. */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    IEMOP_MNEMONIC("mov CH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pIemCpu->uRexB);
}
11600
11601
/** Opcode 0xb6 - MOV DH,Ib.
 * Register index 6 means DH without a REX prefix, SIL/R14B with one —
 * presumably resolved inside IEM_MC_STORE_GREG_U8; confirm there. */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    IEMOP_MNEMONIC("mov DH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pIemCpu->uRexB);
}
11608
11609
/** Opcode 0xb7 - MOV BH,Ib.
 * Register index 7 means BH without a REX prefix, DIL/R15B with one —
 * presumably resolved inside IEM_MC_STORE_GREG_U8; confirm there. */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    IEMOP_MNEMONIC("mov BH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pIemCpu->uRexB);
}
11616
11617
11618/**
11619 * Common 'mov regX,immX' helper.
11620 */
11621FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
11622{
11623 switch (pIemCpu->enmEffOpSize)
11624 {
11625 case IEMMODE_16BIT:
11626 {
11627 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
11628 IEMOP_HLP_NO_LOCK_PREFIX();
11629
11630 IEM_MC_BEGIN(0, 1);
11631 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
11632 IEM_MC_STORE_GREG_U16(iReg, u16Value);
11633 IEM_MC_ADVANCE_RIP();
11634 IEM_MC_END();
11635 break;
11636 }
11637
11638 case IEMMODE_32BIT:
11639 {
11640 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
11641 IEMOP_HLP_NO_LOCK_PREFIX();
11642
11643 IEM_MC_BEGIN(0, 1);
11644 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
11645 IEM_MC_STORE_GREG_U32(iReg, u32Value);
11646 IEM_MC_ADVANCE_RIP();
11647 IEM_MC_END();
11648 break;
11649 }
11650 case IEMMODE_64BIT:
11651 {
11652 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
11653 IEMOP_HLP_NO_LOCK_PREFIX();
11654
11655 IEM_MC_BEGIN(0, 1);
11656 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
11657 IEM_MC_STORE_GREG_U64(iReg, u64Value);
11658 IEM_MC_ADVANCE_RIP();
11659 IEM_MC_END();
11660 break;
11661 }
11662 }
11663
11664 return VINF_SUCCESS;
11665}
11666
11667
11668/** Opcode 0xb8. */
11669FNIEMOP_DEF(iemOp_eAX_Iv)
11670{
11671 IEMOP_MNEMONIC("mov rAX,IV");
11672 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pIemCpu->uRexB);
11673}
11674
11675
11676/** Opcode 0xb9. */
11677FNIEMOP_DEF(iemOp_eCX_Iv)
11678{
11679 IEMOP_MNEMONIC("mov rCX,IV");
11680 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pIemCpu->uRexB);
11681}
11682
11683
11684/** Opcode 0xba. */
11685FNIEMOP_DEF(iemOp_eDX_Iv)
11686{
11687 IEMOP_MNEMONIC("mov rDX,IV");
11688 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pIemCpu->uRexB);
11689}
11690
11691
11692/** Opcode 0xbb. */
11693FNIEMOP_DEF(iemOp_eBX_Iv)
11694{
11695 IEMOP_MNEMONIC("mov rBX,IV");
11696 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pIemCpu->uRexB);
11697}
11698
11699
11700/** Opcode 0xbc. */
11701FNIEMOP_DEF(iemOp_eSP_Iv)
11702{
11703 IEMOP_MNEMONIC("mov rSP,IV");
11704 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pIemCpu->uRexB);
11705}
11706
11707
11708/** Opcode 0xbd. */
11709FNIEMOP_DEF(iemOp_eBP_Iv)
11710{
11711 IEMOP_MNEMONIC("mov rBP,IV");
11712 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pIemCpu->uRexB);
11713}
11714
11715
11716/** Opcode 0xbe. */
11717FNIEMOP_DEF(iemOp_eSI_Iv)
11718{
11719 IEMOP_MNEMONIC("mov rSI,IV");
11720 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pIemCpu->uRexB);
11721}
11722
11723
11724/** Opcode 0xbf. */
11725FNIEMOP_DEF(iemOp_eDI_Iv)
11726{
11727 IEMOP_MNEMONIC("mov rDI,IV");
11728 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pIemCpu->uRexB);
11729}
11730
11731
/** Opcode 0xc0 - Group 2 shift/rotate Eb,Ib.
 * The ModR/M reg field selects the operation (rol/ror/rcl/rcr/shl/shr/sar);
 * /6 is undefined and raises #UD. The shift count is an immediate byte. */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are undefined for counts != 1; don't compare them during verification. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* The effective address comes before the imm8 count; 1 = bytes after ModR/M still to fetch. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_ASSIGN(cShiftArg, cShift);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
11790
11791
/** Opcode 0xc1 - Group 2 shift/rotate Ev,Ib.
 * Same operation selection as 0xc0 (/6 raises #UD), but the destination is a
 * word/dword/qword chosen by the effective operand size. */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are undefined for counts != 1; don't compare them during verification. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* A 32-bit register write zeroes the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Effective address first; 1 = imm8 byte still to fetch after ModR/M. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11928
11929
/** Opcode 0xc2 - RET Iw: near return, then pop Iw extra bytes off the stack.
 *  Operand size defaults to 64-bit in long mode. */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC("retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pIemCpu->enmEffOpSize, u16Imm);
}
11939
11940
/** Opcode 0xc3 - RET: plain near return (shares the C worker with 0xc2,
 *  passing 0 as the byte count to pop). */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC("retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pIemCpu->enmEffOpSize, 0);
}
11949
11950
/** Opcode 0xc4 - LES Gv,Mp, or the 2-byte VEX prefix escape. */
FNIEMOP_DEF(iemOp_les_Gv_Mp)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC("2-byte-vex");
        /* The LES instruction is invalid in 64-bit mode. In legacy and
           compatibility mode it is invalid with MOD=3.
           The use as a VEX prefix is made possible by assigning the inverted
           REX.R to the top MOD bit, and the top bit in the inverted register
           specifier to the bottom MOD bit, thereby effectively limiting 32-bit
           to accessing registers 0..7 in this VEX form. */
        /** @todo VEX: Just use new tables for it. */
        return IEMOP_RAISE_INVALID_OPCODE(); /* VEX decoding not implemented yet; raise #UD for now. */
    }
    IEMOP_MNEMONIC("les Gv,Mp");
    /* Far-pointer load: ES:Gv <- [Mp]. */
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
11971
11972
11973/** Opcode 0xc5. */
/* LDS Gv,Mp - load DS:reg from a far pointer in memory.  In 64-bit mode,
   or with MOD=3, the byte is really the 3-byte VEX prefix; not implemented
   here, so it raises #UD (see todo). */
11974FNIEMOP_DEF(iemOp_lds_Gv_Mp)
11975{
11976 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11977 if ( pIemCpu->enmCpuMode == IEMMODE_64BIT
11978 || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11979 {
11980 IEMOP_MNEMONIC("3-byte-vex");
11981 /* The LDS instruction is invalid 64-bit mode. In legacy and
11982 compatibility mode it is invalid with MOD=3.
11983 The use as a VEX prefix is made possible by assigning the inverted
11984 REX.R and REX.X to the two MOD bits, since the REX bits are ignored
11985 outside of 64-bit mode. */
11986 /** @todo VEX: Just use new tables for it. */
11987 return IEMOP_RAISE_INVALID_OPCODE();
11988 }
11989 IEMOP_MNEMONIC("lds Gv,Mp");
11990 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
11991}
11992
11993
11994/** Opcode 0xc6. */
/* Group 11: only /0 (MOV Eb,Ib) is defined; other /reg values are #UD.
   Note the fetch order: for the memory form the ModR/M displacement is
   decoded before the immediate byte, matching the instruction stream. */
11995FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
11996{
11997 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11998 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
11999 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
12000 return IEMOP_RAISE_INVALID_OPCODE();
12001 IEMOP_MNEMONIC("mov Eb,Ib");
12002
12003 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
12004 {
12005 /* register access */
12006 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
12007 IEM_MC_BEGIN(0, 0);
12008 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8Imm);
12009 IEM_MC_ADVANCE_RIP();
12010 IEM_MC_END();
12011 }
12012 else
12013 {
12014 /* memory access. */
12015 IEM_MC_BEGIN(0, 1);
12016 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12017 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
12018 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
12019 IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrEffDst, u8Imm);
12020 IEM_MC_ADVANCE_RIP();
12021 IEM_MC_END();
12022 }
12023 return VINF_SUCCESS;
12024}
12025
12026
12027/** Opcode 0xc7. */
/* Group 11: only /0 (MOV Ev,Iz) is defined; other /reg values are #UD.
   The immediate is 16 or 32 bits by operand size; in 64-bit mode it is a
   32-bit immediate sign-extended to 64 bits (hence GET_NEXT_S32_SX_U64 and
   cbImm=4 for the effective-address calculation). */
12028FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
12029{
12030 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12031 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
12032 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
12033 return IEMOP_RAISE_INVALID_OPCODE();
12034 IEMOP_MNEMONIC("mov Ev,Iz");
12035
12036 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
12037 {
12038 /* register access */
12039 switch (pIemCpu->enmEffOpSize)
12040 {
12041 case IEMMODE_16BIT:
12042 IEM_MC_BEGIN(0, 0);
12043 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12044 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Imm);
12045 IEM_MC_ADVANCE_RIP();
12046 IEM_MC_END();
12047 return VINF_SUCCESS;
12048
12049 case IEMMODE_32BIT:
12050 IEM_MC_BEGIN(0, 0);
12051 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
12052 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Imm);
12053 IEM_MC_ADVANCE_RIP();
12054 IEM_MC_END();
12055 return VINF_SUCCESS;
12056
12057 case IEMMODE_64BIT:
12058 IEM_MC_BEGIN(0, 0);
12059 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
12060 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Imm);
12061 IEM_MC_ADVANCE_RIP();
12062 IEM_MC_END();
12063 return VINF_SUCCESS;
12064
12065 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12066 }
12067 }
12068 else
12069 {
12070 /* memory access. */
12071 switch (pIemCpu->enmEffOpSize)
12072 {
12073 case IEMMODE_16BIT:
12074 IEM_MC_BEGIN(0, 1);
12075 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12076 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
12077 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12078 IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Imm);
12079 IEM_MC_ADVANCE_RIP();
12080 IEM_MC_END();
12081 return VINF_SUCCESS;
12082
12083 case IEMMODE_32BIT:
12084 IEM_MC_BEGIN(0, 1);
12085 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12086 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
12087 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
12088 IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Imm);
12089 IEM_MC_ADVANCE_RIP();
12090 IEM_MC_END();
12091 return VINF_SUCCESS;
12092
12093 case IEMMODE_64BIT:
12094 IEM_MC_BEGIN(0, 1);
12095 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12096 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
12097 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
12098 IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Imm);
12099 IEM_MC_ADVANCE_RIP();
12100 IEM_MC_END();
12101 return VINF_SUCCESS;
12102
12103 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12104 }
12105 }
12106}
12107
12108
12109
12110
12111/** Opcode 0xc8. */
/* ENTER imm16,imm8: create a stack frame (frame size, nesting level).
   Both immediates are fetched here; the heavy lifting is deferred to
   iemCImpl_enter. */
12112FNIEMOP_DEF(iemOp_enter_Iw_Ib)
12113{
12114 IEMOP_MNEMONIC("enter Iw,Ib");
12115 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12116 IEMOP_HLP_NO_LOCK_PREFIX();
12117 uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);
12118 uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
12119 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pIemCpu->enmEffOpSize, cbFrame, u8NestingLevel);
12120}
12121
12122
12123/** Opcode 0xc9. */
/* LEAVE: tear down the current stack frame (xSP = xBP, pop xBP); deferred
   to iemCImpl_leave.  Fix: the mnemonic was a copy & paste leftover saying
   "retn", which mislabelled the instruction in logging/verification. */
12124FNIEMOP_DEF(iemOp_leave)
12125{
12126 IEMOP_MNEMONIC("leave");
12127 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12128 IEMOP_HLP_NO_LOCK_PREFIX();
12129 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pIemCpu->enmEffOpSize);
12130}
12131
12132
12133/** Opcode 0xca. */
/* RETF imm16: far return, then pop imm16 extra bytes; deferred to
   iemCImpl_retf. */
12134FNIEMOP_DEF(iemOp_retf_Iw)
12135{
12136 IEMOP_MNEMONIC("retf Iw");
12137 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12138 IEMOP_HLP_NO_LOCK_PREFIX();
12139 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12140 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pIemCpu->enmEffOpSize, u16Imm);
12141}
12142
12143
12144/** Opcode 0xcb. */
/* RETF: plain far return (0xca with zero stack adjustment). */
12145FNIEMOP_DEF(iemOp_retf)
12146{
12147 IEMOP_MNEMONIC("retf");
12148 IEMOP_HLP_NO_LOCK_PREFIX();
12149 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12150 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pIemCpu->enmEffOpSize, 0);
12151}
12152
12153
12154/** Opcode 0xcc. */
/* INT3: breakpoint trap - software interrupt 3 with the "is breakpoint
   instruction" flag set so the CIMPL handler can apply the #BP specifics. */
12155FNIEMOP_DEF(iemOp_int_3)
12156{
12157 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
12158}
12159
12160
12160/** Opcode 0xcd. */
/* INT imm8: software interrupt with the vector from the immediate byte. */
12161FNIEMOP_DEF(iemOp_int_Ib)
12162{
12163 uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
12164 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
12165}
12167
12168
12169/** Opcode 0xce. */
/* INTO: raise the #OF (overflow) trap, vector 4, if EFLAGS.OF is set -
   handled by iemCImpl_int.  Fixes: INTO is invalid (#UD) in 64-bit mode
   per the Intel/AMD manuals, which the original code did not check; the
   mnemonic for logging/verification was also missing. */
12170FNIEMOP_DEF(iemOp_into)
12171{
 IEMOP_MNEMONIC("into");
 IEMOP_HLP_NO_64BIT();

12172 IEM_MC_BEGIN(2, 0);
12173 IEM_MC_ARG_CONST(uint8_t, u8Int, /*=*/ X86_XCPT_OF, 0);
12174 IEM_MC_ARG_CONST(bool, fIsBpInstr, /*=*/ false, 1);
12175 IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
12176 IEM_MC_END();
12177 return VINF_SUCCESS;
12178}
12179
12180
12181/** Opcode 0xcf. */
/* IRET/IRETD/IRETQ: interrupt return, operand-size dependent; deferred to
   iemCImpl_iret. */
12182FNIEMOP_DEF(iemOp_iret)
12183{
12184 IEMOP_MNEMONIC("iret");
12185 IEMOP_HLP_NO_LOCK_PREFIX();
12186 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pIemCpu->enmEffOpSize);
12187}
12188
12189
12190/** Opcode 0xd0. */
/* Group 2, Eb,1: byte rotate/shift by a constant count of 1.  The /reg
   field selects the operation (/6 is undefined -> #UD).  Register form
   calls the assembly worker on a GREG reference; memory form maps the
   byte read-write, applies the worker, then commits memory and EFLAGS. */
12191FNIEMOP_DEF(iemOp_Grp2_Eb_1)
12192{
12193 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12194 PCIEMOPSHIFTSIZES pImpl;
12195 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
12196 {
12197 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,1"); break;
12198 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,1"); break;
12199 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,1"); break;
12200 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,1"); break;
12201 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,1"); break;
12202 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,1"); break;
12203 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,1"); break;
12204 case 6: return IEMOP_RAISE_INVALID_OPCODE();
12205 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
12206 }
12207 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
12208
12209 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
12210 {
12211 /* register */
12212 IEMOP_HLP_NO_LOCK_PREFIX();
12213 IEM_MC_BEGIN(3, 0);
12214 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
12215 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
12216 IEM_MC_ARG(uint32_t *, pEFlags, 2);
12217 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
12218 IEM_MC_REF_EFLAGS(pEFlags);
12219 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
12220 IEM_MC_ADVANCE_RIP();
12221 IEM_MC_END();
12222 }
12223 else
12224 {
12225 /* memory */
12226 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
12227 IEM_MC_BEGIN(3, 2);
12228 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
12229 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
12230 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
12231 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12232
12233 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12234 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
12235 IEM_MC_FETCH_EFLAGS(EFlags);
12236 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
12237
12238 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
12239 IEM_MC_COMMIT_EFLAGS(EFlags);
12240 IEM_MC_ADVANCE_RIP();
12241 IEM_MC_END();
12242 }
12243 return VINF_SUCCESS;
12244}
12245
12246
12247
12248/** Opcode 0xd1. */
/* Group 2, Ev,1: word/dword/qword rotate/shift by a constant count of 1.
   Same /reg dispatch as 0xd0 (/6 undefined -> #UD); the inner switches
   pick the 16/32/64-bit worker by effective operand size.  The 32-bit
   register form clears the high half of the 64-bit GREG, as required in
   long mode. */
12249FNIEMOP_DEF(iemOp_Grp2_Ev_1)
12250{
12251 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12252 PCIEMOPSHIFTSIZES pImpl;
12253 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
12254 {
12255 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,1"); break;
12256 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,1"); break;
12257 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,1"); break;
12258 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,1"); break;
12259 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,1"); break;
12260 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,1"); break;
12261 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,1"); break;
12262 case 6: return IEMOP_RAISE_INVALID_OPCODE();
12263 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
12264 }
12265 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
12266
12267 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
12268 {
12269 /* register */
12270 IEMOP_HLP_NO_LOCK_PREFIX();
12271 switch (pIemCpu->enmEffOpSize)
12272 {
12273 case IEMMODE_16BIT:
12274 IEM_MC_BEGIN(3, 0);
12275 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
12276 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
12277 IEM_MC_ARG(uint32_t *, pEFlags, 2);
12278 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
12279 IEM_MC_REF_EFLAGS(pEFlags);
12280 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
12281 IEM_MC_ADVANCE_RIP();
12282 IEM_MC_END();
12283 return VINF_SUCCESS;
12284
12285 case IEMMODE_32BIT:
12286 IEM_MC_BEGIN(3, 0);
12287 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12288 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
12289 IEM_MC_ARG(uint32_t *, pEFlags, 2);
12290 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
12291 IEM_MC_REF_EFLAGS(pEFlags);
12292 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
12293 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
12294 IEM_MC_ADVANCE_RIP();
12295 IEM_MC_END();
12296 return VINF_SUCCESS;
12297
12298 case IEMMODE_64BIT:
12299 IEM_MC_BEGIN(3, 0);
12300 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12301 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
12302 IEM_MC_ARG(uint32_t *, pEFlags, 2);
12303 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
12304 IEM_MC_REF_EFLAGS(pEFlags);
12305 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
12306 IEM_MC_ADVANCE_RIP();
12307 IEM_MC_END();
12308 return VINF_SUCCESS;
12309
12310 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12311 }
12312 }
12313 else
12314 {
12315 /* memory */
12316 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
12317 switch (pIemCpu->enmEffOpSize)
12318 {
12319 case IEMMODE_16BIT:
12320 IEM_MC_BEGIN(3, 2);
12321 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
12322 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
12323 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
12324 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12325
12326 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12327 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
12328 IEM_MC_FETCH_EFLAGS(EFlags);
12329 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
12330
12331 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
12332 IEM_MC_COMMIT_EFLAGS(EFlags);
12333 IEM_MC_ADVANCE_RIP();
12334 IEM_MC_END();
12335 return VINF_SUCCESS;
12336
12337 case IEMMODE_32BIT:
12338 IEM_MC_BEGIN(3, 2);
12339 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12340 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
12341 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
12342 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12343
12344 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12345 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
12346 IEM_MC_FETCH_EFLAGS(EFlags);
12347 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
12348
12349 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
12350 IEM_MC_COMMIT_EFLAGS(EFlags);
12351 IEM_MC_ADVANCE_RIP();
12352 IEM_MC_END();
12353 return VINF_SUCCESS;
12354
12355 case IEMMODE_64BIT:
12356 IEM_MC_BEGIN(3, 2);
12357 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12358 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
12359 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
12360 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12361
12362 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12363 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
12364 IEM_MC_FETCH_EFLAGS(EFlags);
12365 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
12366
12367 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
12368 IEM_MC_COMMIT_EFLAGS(EFlags);
12369 IEM_MC_ADVANCE_RIP();
12370 IEM_MC_END();
12371 return VINF_SUCCESS;
12372
12373 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12374 }
12375 }
12376}
12377
12378
12379/** Opcode 0xd2. */
/* Group 2, Eb,CL: byte rotate/shift with the count taken from CL at
   runtime (fetched via FETCH_GREG_U8 of xCX).  Same /reg dispatch as
   0xd0; /6 is undefined -> #UD. */
12380FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
12381{
12382 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12383 PCIEMOPSHIFTSIZES pImpl;
12384 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
12385 {
12386 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,CL"); break;
12387 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,CL"); break;
12388 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,CL"); break;
12389 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,CL"); break;
12390 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,CL"); break;
12391 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,CL"); break;
12392 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,CL"); break;
12393 case 6: return IEMOP_RAISE_INVALID_OPCODE();
12394 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
12395 }
12396 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
12397
12398 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
12399 {
12400 /* register */
12401 IEMOP_HLP_NO_LOCK_PREFIX();
12402 IEM_MC_BEGIN(3, 0);
12403 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
12404 IEM_MC_ARG(uint8_t, cShiftArg, 1);
12405 IEM_MC_ARG(uint32_t *, pEFlags, 2);
12406 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
12407 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
12408 IEM_MC_REF_EFLAGS(pEFlags);
12409 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
12410 IEM_MC_ADVANCE_RIP();
12411 IEM_MC_END();
12412 }
12413 else
12414 {
12415 /* memory */
12416 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
12417 IEM_MC_BEGIN(3, 2);
12418 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
12419 IEM_MC_ARG(uint8_t, cShiftArg, 1);
12420 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
12421 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12422
12423 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12424 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
12425 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
12426 IEM_MC_FETCH_EFLAGS(EFlags);
12427 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
12428
12429 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
12430 IEM_MC_COMMIT_EFLAGS(EFlags);
12431 IEM_MC_ADVANCE_RIP();
12432 IEM_MC_END();
12433 }
12434 return VINF_SUCCESS;
12435}
12436
12437
12438/** Opcode 0xd3. */
/* Group 2, Ev,CL: word/dword/qword rotate/shift with the count from CL.
   Same /reg dispatch (/6 undefined -> #UD); inner switches select the
   16/32/64-bit worker.  The 32-bit register form clears the high half of
   the 64-bit GREG, as required in long mode. */
12439FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
12440{
12441 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12442 PCIEMOPSHIFTSIZES pImpl;
12443 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
12444 {
12445 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,CL"); break;
12446 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,CL"); break;
12447 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,CL"); break;
12448 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,CL"); break;
12449 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,CL"); break;
12450 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,CL"); break;
12451 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,CL"); break;
12452 case 6: return IEMOP_RAISE_INVALID_OPCODE();
12453 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
12454 }
12455 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
12456
12457 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
12458 {
12459 /* register */
12460 IEMOP_HLP_NO_LOCK_PREFIX();
12461 switch (pIemCpu->enmEffOpSize)
12462 {
12463 case IEMMODE_16BIT:
12464 IEM_MC_BEGIN(3, 0);
12465 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
12466 IEM_MC_ARG(uint8_t, cShiftArg, 1);
12467 IEM_MC_ARG(uint32_t *, pEFlags, 2);
12468 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
12469 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
12470 IEM_MC_REF_EFLAGS(pEFlags);
12471 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
12472 IEM_MC_ADVANCE_RIP();
12473 IEM_MC_END();
12474 return VINF_SUCCESS;
12475
12476 case IEMMODE_32BIT:
12477 IEM_MC_BEGIN(3, 0);
12478 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12479 IEM_MC_ARG(uint8_t, cShiftArg, 1);
12480 IEM_MC_ARG(uint32_t *, pEFlags, 2);
12481 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
12482 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
12483 IEM_MC_REF_EFLAGS(pEFlags);
12484 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
12485 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
12486 IEM_MC_ADVANCE_RIP();
12487 IEM_MC_END();
12488 return VINF_SUCCESS;
12489
12490 case IEMMODE_64BIT:
12491 IEM_MC_BEGIN(3, 0);
12492 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12493 IEM_MC_ARG(uint8_t, cShiftArg, 1);
12494 IEM_MC_ARG(uint32_t *, pEFlags, 2);
12495 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
12496 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
12497 IEM_MC_REF_EFLAGS(pEFlags);
12498 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
12499 IEM_MC_ADVANCE_RIP();
12500 IEM_MC_END();
12501 return VINF_SUCCESS;
12502
12503 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12504 }
12505 }
12506 else
12507 {
12508 /* memory */
12509 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
12510 switch (pIemCpu->enmEffOpSize)
12511 {
12512 case IEMMODE_16BIT:
12513 IEM_MC_BEGIN(3, 2);
12514 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
12515 IEM_MC_ARG(uint8_t, cShiftArg, 1);
12516 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
12517 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12518
12519 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12520 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
12521 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
12522 IEM_MC_FETCH_EFLAGS(EFlags);
12523 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
12524
12525 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
12526 IEM_MC_COMMIT_EFLAGS(EFlags);
12527 IEM_MC_ADVANCE_RIP();
12528 IEM_MC_END();
12529 return VINF_SUCCESS;
12530
12531 case IEMMODE_32BIT:
12532 IEM_MC_BEGIN(3, 2);
12533 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12534 IEM_MC_ARG(uint8_t, cShiftArg, 1);
12535 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
12536 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12537
12538 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12539 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
12540 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
12541 IEM_MC_FETCH_EFLAGS(EFlags);
12542 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
12543
12544 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
12545 IEM_MC_COMMIT_EFLAGS(EFlags);
12546 IEM_MC_ADVANCE_RIP();
12547 IEM_MC_END();
12548 return VINF_SUCCESS;
12549
12550 case IEMMODE_64BIT:
12551 IEM_MC_BEGIN(3, 2);
12552 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12553 IEM_MC_ARG(uint8_t, cShiftArg, 1);
12554 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
12555 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12556
12557 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12558 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
12559 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
12560 IEM_MC_FETCH_EFLAGS(EFlags);
12561 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
12562
12563 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
12564 IEM_MC_COMMIT_EFLAGS(EFlags);
12565 IEM_MC_ADVANCE_RIP();
12566 IEM_MC_END();
12567 return VINF_SUCCESS;
12568
12569 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12570 }
12571 }
12572}
12573
12574/** Opcode 0xd4. */
/* AAM imm8: ASCII adjust AX after multiply.  Invalid in 64-bit mode; an
   immediate of zero raises #DE before deferring to iemCImpl_aam. */
12575FNIEMOP_DEF(iemOp_aam_Ib)
12576{
12577 IEMOP_MNEMONIC("aam Ib");
12578 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12579 IEMOP_HLP_NO_LOCK_PREFIX();
12580 IEMOP_HLP_NO_64BIT();
12581 if (!bImm)
12582 return IEMOP_RAISE_DIVIDE_ERROR();
12583 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
12584}
12585
12586
12586/** Opcode 0xd5. */
/* AAD imm8: ASCII adjust AX before division.  Invalid in 64-bit mode;
   deferred to iemCImpl_aad (no #DE case here, unlike AAM). */
12587FNIEMOP_DEF(iemOp_aad_Ib)
12588{
12589 IEMOP_MNEMONIC("aad Ib");
12590 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12591 IEMOP_HLP_NO_LOCK_PREFIX();
12592 IEMOP_HLP_NO_64BIT();
12593 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
12594}
12596
12597
12598/** Opcode 0xd7. */
/* XLAT: AL = [seg:xBX + zero-extended AL], one case per effective address
   size.  The 16/32-bit cases use MEM16/MEM32 fetch variants so the
   address wraps at the address-size boundary. */
12599FNIEMOP_DEF(iemOp_xlat)
12600{
12601 IEMOP_MNEMONIC("xlat");
12602 IEMOP_HLP_NO_LOCK_PREFIX();
12603 switch (pIemCpu->enmEffAddrMode)
12604 {
12605 case IEMMODE_16BIT:
12606 IEM_MC_BEGIN(2, 0);
12607 IEM_MC_LOCAL(uint8_t, u8Tmp);
12608 IEM_MC_LOCAL(uint16_t, u16Addr);
12609 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
12610 IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
12611 IEM_MC_FETCH_MEM16_U8(u8Tmp, pIemCpu->iEffSeg, u16Addr);
12612 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
12613 IEM_MC_ADVANCE_RIP();
12614 IEM_MC_END();
12615 return VINF_SUCCESS;
12616
12617 case IEMMODE_32BIT:
12618 IEM_MC_BEGIN(2, 0);
12619 IEM_MC_LOCAL(uint8_t, u8Tmp);
12620 IEM_MC_LOCAL(uint32_t, u32Addr);
12621 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
12622 IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
12623 IEM_MC_FETCH_MEM32_U8(u8Tmp, pIemCpu->iEffSeg, u32Addr);
12624 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
12625 IEM_MC_ADVANCE_RIP();
12626 IEM_MC_END();
12627 return VINF_SUCCESS;
12628
12629 case IEMMODE_64BIT:
12630 IEM_MC_BEGIN(2, 0);
12631 IEM_MC_LOCAL(uint8_t, u8Tmp);
12632 IEM_MC_LOCAL(uint64_t, u64Addr);
12633 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
12634 IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
12635 IEM_MC_FETCH_MEM_U8(u8Tmp, pIemCpu->iEffSeg, u64Addr);
12636 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
12637 IEM_MC_ADVANCE_RIP();
12638 IEM_MC_END();
12639 return VINF_SUCCESS;
12640
12641 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12642 }
12643}
12644
12645
12646/**
12647 * Common worker for FPU instructions working on ST0 and STn, and storing the
12648 * result in ST0.
12649 *
12650 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 *
 * Raises #NM if the FPU is unavailable and the pending FPU exception if any;
 * if either ST0 or ST(bRm&7) is empty, records a stack underflow on ST0
 * instead of calling the worker.
12651 */
12652FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
12653{
12654 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12655
12656 IEM_MC_BEGIN(3, 1);
12657 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
12658 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
12659 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
12660 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
12661
12662 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12663 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12664 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
12665 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
12666 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
12667 IEM_MC_ELSE()
12668 IEM_MC_FPU_STACK_UNDERFLOW(0);
12669 IEM_MC_ENDIF();
12670 IEM_MC_USED_FPU();
12671 IEM_MC_ADVANCE_RIP();
12672
12673 IEM_MC_END();
12674 return VINF_SUCCESS;
12675}
12676
12677
12678/**
12679 * Common worker for FPU instructions working on ST0 and STn, and only affecting
12680 * flags.
12681 *
12682 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 *
 * Like iemOpHlpFpu_st0_stN but the worker only produces an FSW value; no
 * register is written.  Underflow is recorded without a destination register
 * (UINT8_MAX).
12683 */
12684FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
12685{
12686 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12687
12688 IEM_MC_BEGIN(3, 1);
12689 IEM_MC_LOCAL(uint16_t, u16Fsw);
12690 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
12691 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
12692 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
12693
12694 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12695 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12696 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
12697 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
12698 IEM_MC_UPDATE_FSW(u16Fsw);
12699 IEM_MC_ELSE()
12700 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
12701 IEM_MC_ENDIF();
12702 IEM_MC_USED_FPU();
12703 IEM_MC_ADVANCE_RIP();
12704
12705 IEM_MC_END();
12706 return VINF_SUCCESS;
12707}
12708
12709
12710/**
12711 * Common worker for FPU instructions working on ST0 and STn, only affecting
12712 * flags, and popping when done.
12713 *
12714 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 *
 * Same as iemOpHlpFpuNoStore_st0_stN but pops the FPU stack afterwards
 * (both on the normal path and on underflow).
12715 */
12716FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
12717{
12718 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12719
12720 IEM_MC_BEGIN(3, 1);
12721 IEM_MC_LOCAL(uint16_t, u16Fsw);
12722 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
12723 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
12724 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
12725
12726 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12727 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12728 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
12729 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
12730 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
12731 IEM_MC_ELSE()
12732 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
12733 IEM_MC_ENDIF();
12734 IEM_MC_USED_FPU();
12735 IEM_MC_ADVANCE_RIP();
12736
12737 IEM_MC_END();
12738 return VINF_SUCCESS;
12739}
12740
12741
12742/** Opcode 0xd8 11/0. */
/* FADD ST0,ST(i): delegates to the common ST0/STn worker with the
   assembly add implementation. */
12743FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
12744{
12745 IEMOP_MNEMONIC("fadd st0,stN");
12746 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
12747}
12748
12749
12750/** Opcode 0xd8 11/1. */
/* FMUL ST0,ST(i): common ST0/STn worker with the multiply implementation. */
12751FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
12752{
12753 IEMOP_MNEMONIC("fmul st0,stN");
12754 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
12755}
12756
12757
12758/** Opcode 0xd8 11/2. */
/* FCOM ST0,ST(i): compare, flags only (no store, no pop). */
12759FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
12760{
12761 IEMOP_MNEMONIC("fcom st0,stN");
12762 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
12763}
12764
12765
12766/** Opcode 0xd8 11/3. */
/* FCOMP ST0,ST(i): compare, flags only, then pop the stack (same assembly
   worker as FCOM, popping variant of the helper). */
12767FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
12768{
12769 IEMOP_MNEMONIC("fcomp st0,stN");
12770 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
12771}
12772
12773
12774/** Opcode 0xd8 11/4. */
/* FSUB ST0,ST(i): common ST0/STn worker with the subtract implementation. */
12775FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
12776{
12777 IEMOP_MNEMONIC("fsub st0,stN");
12778 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
12779}
12780
12781
12782/** Opcode 0xd8 11/5. */
/* FSUBR ST0,ST(i): reversed subtract via the common ST0/STn worker. */
12783FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
12784{
12785 IEMOP_MNEMONIC("fsubr st0,stN");
12786 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
12787}
12788
12789
12790/** Opcode 0xd8 11/6. */
/* FDIV ST0,ST(i): common ST0/STn worker with the divide implementation. */
12791FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
12792{
12793 IEMOP_MNEMONIC("fdiv st0,stN");
12794 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
12795}
12796
12797
12798/** Opcode 0xd8 11/7. */
/* FDIVR ST0,ST(i): reversed divide via the common ST0/STn worker. */
12799FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
12800{
12801 IEMOP_MNEMONIC("fdivr st0,stN");
12802 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
12803}
12804
12805
12806/**
12807 * Common worker for FPU instructions working on ST0 and an m32r, and storing
12808 * the result in ST0.
12809 *
12810 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 *
 * Decodes the effective address first, then (after decoding completes)
 * checks FPU availability/pending exceptions, fetches the 32-bit real from
 * memory and calls the worker if ST0 is not empty; otherwise records a
 * stack underflow on ST0.
12811 */
12812FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
12813{
12814 IEM_MC_BEGIN(3, 3);
12815 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12816 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
12817 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
12818 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
12819 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
12820 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
12821
12822 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12823 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12824
12825 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12826 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12827 IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);
12828
12829 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
12830 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
12831 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
12832 IEM_MC_ELSE()
12833 IEM_MC_FPU_STACK_UNDERFLOW(0);
12834 IEM_MC_ENDIF();
12835 IEM_MC_USED_FPU();
12836 IEM_MC_ADVANCE_RIP();
12837
12838 IEM_MC_END();
12839 return VINF_SUCCESS;
12840}
12841
12842
12843/** Opcode 0xd8 !11/0. */
/* FADD ST0,m32real: common ST0/m32r worker with the add implementation. */
12844FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
12845{
12846 IEMOP_MNEMONIC("fadd st0,m32r");
12847 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
12848}
12849
12850
12851/** Opcode 0xd8 !11/1. */
/* FMUL ST0,m32real: common ST0/m32r worker with the multiply implementation. */
12852FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
12853{
12854 IEMOP_MNEMONIC("fmul st0,m32r");
12855 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
12856}
12857
12858
12859/** Opcode 0xd8 !11/2. */
/* FCOM ST0,m32real: compare ST0 with a 32-bit real from memory; updates
   FSW only, recording the memory operand for FPU state (FDP/FDS).  On an
   empty ST0 a stack underflow with the memory operand is recorded. */
12860FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
12861{
12862 IEMOP_MNEMONIC("fcom st0,m32r");
12863
12864 IEM_MC_BEGIN(3, 3);
12865 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12866 IEM_MC_LOCAL(uint16_t, u16Fsw);
12867 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
12868 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
12869 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
12870 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
12871
12872 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12873 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12874
12875 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12876 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12877 IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);
12878
12879 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
12880 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
12881 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
12882 IEM_MC_ELSE()
12883 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
12884 IEM_MC_ENDIF();
12885 IEM_MC_USED_FPU();
12886 IEM_MC_ADVANCE_RIP();
12887
12888 IEM_MC_END();
12889 return VINF_SUCCESS;
12890}
12891
12892
/** Opcode 0xd8 !11/3.
 * FCOMP ST0,m32real: same compare as FCOM (shares iemAImpl_fcom_r80_by_r32),
 * but pops ST0 afterwards (the *_THEN_POP FSW/underflow variants). */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    /* Finish decoding the operand before raising any exceptions. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* UINT8_MAX: no stack register to report for the underflow case. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12925
12926
/** Opcode 0xd8 !11/4.
 * FSUB ST0,m32real: ST0 := ST0 - m32real. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}


/** Opcode 0xd8 !11/5.
 * FSUBR ST0,m32real: reversed operands, ST0 := m32real - ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}


/** Opcode 0xd8 !11/6.
 * FDIV ST0,m32real: ST0 := ST0 / m32real. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}


/** Opcode 0xd8 !11/7.
 * FDIVR ST0,m32real: reversed operands, ST0 := m32real / ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
12957
12958
/** Opcode 0xd8.
 * First FPU escape byte: dispatches on the ModR/M reg field.  Register forms
 * (mod == 3) operate on ST0/STn, memory forms on a 32-bit real operand. */
FNIEMOP_DEF(iemOp_EscF0)
{
    /* Record the offset of this escape opcode byte for FPU opcode (FOP)
       bookkeeping; presumably consumed when the FPU state is updated. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms (m32real operand). */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12996
12997
/** Opcode 0xd9 /0 mem32real
 * FLD m32real: convert a 32-bit real to 80-bit and push it onto the stack.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* ST(7) is the slot the push will land in (the stack wraps mod 8);
       if it is occupied the push overflows instead. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13030
13031
/** Opcode 0xd9 !11/2 mem32real
 * FST m32real: store ST0 to memory as a 32-bit real (no pop).  The
 * destination is mapped for write so the conversion can fill it in place. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        /* Commit is FSW-aware: an unmasked exception must not write the result. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty register: with IM masked, store the indefinite QNaN; either
           way report the stack underflow. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13066
13067
/** Opcode 0xd9 !11/3
 * FSTP m32real: identical to iemOp_fst_m32r except the stack is popped on
 * completion (the *_THEN_POP FSW/underflow variants). */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        /* Commit is FSW-aware: an unmasked exception must not write the result. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty register: with IM masked, store the indefinite QNaN; either
           way report the stack underflow (and pop). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13102
13103
/** Opcode 0xd9 !11/4
 * FLDENV m14/28byte: load the FPU environment (control/status/tag words etc.).
 * The real work, including the 14 vs 28 byte layout choice, is done by
 * iemCImpl_fldenv based on the effective operand size. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pIemCpu->iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
13119
13120
13121/** Opcode 0xd9 !11/5 */
13122FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
13123{
13124 IEMOP_MNEMONIC("fldcw m2byte");
13125 IEM_MC_BEGIN(1, 1);
13126 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13127 IEM_MC_ARG(uint16_t, u16Fsw, 0);
13128 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13129 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13130 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13131 IEM_MC_FETCH_MEM_U16(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
13132 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
13133 IEM_MC_END();
13134 return VINF_SUCCESS;
13135}
13136
13137
13138/** Opcode 0xd9 !11/6 */
13139FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
13140{
13141 IEMOP_MNEMONIC("fstenv m14/m28byte");
13142 IEM_MC_BEGIN(3, 0);
13143 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
13144 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pIemCpu->iEffSeg, 1);
13145 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
13146 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
13147 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13148 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13149 IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
13150 IEM_MC_END();
13151 return VINF_SUCCESS;
13152}
13153
13154
/** Opcode 0xd9 !11/7
 * FNSTCW m2byte: store the FPU control word to memory (no-wait form). */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnstcw m2byte");
    /* NOTE(review): IEM_MC_BEGIN(2, 0) with zero args and two locals looks
     * transposed compared to the (cArgs, cLocals) usage in the sibling
     * functions (e.g. fldcw's (1, 1)) -- confirm whether the counts matter. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
13171
13172
/** Opcode 0xd9 0xc9, 0xd9 0xd8-0xdf, ++?.
 * FNOP: does nothing except the usual FPU bookkeeping (opcode/IP update). */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC("fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
13190
13191
/** Opcode 0xd9 11/0 stN
 * FLD ST(i): push a copy of ST(i) onto the stack. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* The source register index comes from the ModR/M r/m bits. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
13217
13218
/** Opcode 0xd9 11/3 stN
 * FXCH ST(i): exchange the contents of ST0 and ST(i).  When either register
 * is empty, the underflow handling is delegated to iemCImpl_fxch_underflow. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        /* Swap: old ST(i) goes to ST0 via the result (with C1 set), old ST0
           is written straight into ST(i). */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
13247
13248
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * FSTP ST(i): copy ST0 into ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
    if (!iDstReg)
    {
        /* Destination is ST0 itself: no copy needed, just pop (or report
           underflow if ST0 is empty). */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();
        IEM_MC_USED_FPU();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* General case: store ST0 into ST(iDstReg) and pop. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();
        IEM_MC_USED_FPU();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
13291
13292
13293/**
13294 * Common worker for FPU instructions working on ST0 and replaces it with the
13295 * result, i.e. unary operators.
13296 *
13297 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13298 */
13299FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
13300{
13301 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13302
13303 IEM_MC_BEGIN(2, 1);
13304 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13305 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13306 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
13307
13308 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13309 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13310 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
13311 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
13312 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
13313 IEM_MC_ELSE()
13314 IEM_MC_FPU_STACK_UNDERFLOW(0);
13315 IEM_MC_ENDIF();
13316 IEM_MC_USED_FPU();
13317 IEM_MC_ADVANCE_RIP();
13318
13319 IEM_MC_END();
13320 return VINF_SUCCESS;
13321}
13322
13323
/** Opcode 0xd9 0xe0.
 * FCHS: negate ST0. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC("fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}


/** Opcode 0xd9 0xe1.
 * FABS: clear the sign of ST0. */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC("fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
13338
13339
13340/**
13341 * Common worker for FPU instructions working on ST0 and only returns FSW.
13342 *
13343 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13344 */
13345FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
13346{
13347 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13348
13349 IEM_MC_BEGIN(2, 1);
13350 IEM_MC_LOCAL(uint16_t, u16Fsw);
13351 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13352 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
13353
13354 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13355 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13356 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
13357 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
13358 IEM_MC_UPDATE_FSW(u16Fsw);
13359 IEM_MC_ELSE()
13360 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
13361 IEM_MC_ENDIF();
13362 IEM_MC_USED_FPU();
13363 IEM_MC_ADVANCE_RIP();
13364
13365 IEM_MC_END();
13366 return VINF_SUCCESS;
13367}
13368
13369
/** Opcode 0xd9 0xe4.
 * FTST: compare ST0 against 0.0; result in the FSW condition codes only. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC("ftst st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
}


/** Opcode 0xd9 0xe5.
 * FXAM: classify ST0; result in the FSW condition codes only. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC("fxam st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
}
13384
13385
13386/**
13387 * Common worker for FPU instructions pushing a constant onto the FPU stack.
13388 *
13389 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13390 */
13391FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
13392{
13393 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13394
13395 IEM_MC_BEGIN(1, 1);
13396 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13397 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13398
13399 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13400 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13401 IEM_MC_IF_FPUREG_IS_EMPTY(7)
13402 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
13403 IEM_MC_PUSH_FPU_RESULT(FpuRes);
13404 IEM_MC_ELSE()
13405 IEM_MC_FPU_STACK_PUSH_OVERFLOW();
13406 IEM_MC_ENDIF();
13407 IEM_MC_USED_FPU();
13408 IEM_MC_ADVANCE_RIP();
13409
13410 IEM_MC_END();
13411 return VINF_SUCCESS;
13412}
13413
13414
/** Opcode 0xd9 0xe8.
 * FLD1: push +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC("fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}


/** Opcode 0xd9 0xe9.
 * FLDL2T: push log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC("fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}


/** Opcode 0xd9 0xea.
 * FLDL2E: push log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC("fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}

/** Opcode 0xd9 0xeb.
 * FLDPI: push pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC("fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}


/** Opcode 0xd9 0xec.
 * FLDLG2: push log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC("fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}

/** Opcode 0xd9 0xed.
 * FLDLN2: push ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC("fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}


/** Opcode 0xd9 0xee.
 * FLDZ: push +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC("fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}


/** Opcode 0xd9 0xf0.
 * F2XM1: ST0 := 2^ST0 - 1. */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC("f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
13475
13476
13477/** Opcode 0xd9 0xf1. */
13478FNIEMOP_DEF(iemOp_fylx2)
13479{
13480 IEMOP_MNEMONIC("fylx2 st0");
13481 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fyl2x_r80);
13482}
13483
13484
13485/**
13486 * Common worker for FPU instructions working on ST0 and having two outputs, one
13487 * replacing ST0 and one pushed onto the stack.
13488 *
13489 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13490 */
13491FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
13492{
13493 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13494
13495 IEM_MC_BEGIN(2, 1);
13496 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
13497 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
13498 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
13499
13500 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13501 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13502 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
13503 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
13504 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
13505 IEM_MC_ELSE()
13506 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
13507 IEM_MC_ENDIF();
13508 IEM_MC_USED_FPU();
13509 IEM_MC_ADVANCE_RIP();
13510
13511 IEM_MC_END();
13512 return VINF_SUCCESS;
13513}
13514
13515
/** Opcode 0xd9 0xf2.
 * FPTAN: replaces ST0 with its tangent and pushes a second result. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC("fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
13522
13523
13524/**
13525 * Common worker for FPU instructions working on STn and ST0, storing the result
13526 * in STn, and popping the stack unless IE, DE or ZE was raised.
13527 *
13528 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13529 */
13530FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
13531{
13532 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13533
13534 IEM_MC_BEGIN(3, 1);
13535 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13536 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13537 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13538 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
13539
13540 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13541 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13542
13543 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
13544 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
13545 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
13546 IEM_MC_ELSE()
13547 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
13548 IEM_MC_ENDIF();
13549 IEM_MC_USED_FPU();
13550 IEM_MC_ADVANCE_RIP();
13551
13552 IEM_MC_END();
13553 return VINF_SUCCESS;
13554}
13555
13556
/** Opcode 0xd9 0xf3.
 * FPATAN: ST1 := arctan(ST1/ST0) and pop -- note the constant 1 passed in
 * place of a ModR/M byte selects ST1 as the destination. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC("fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}


/** Opcode 0xd9 0xf4.
 * FXTRACT: split ST0 into exponent (replacing ST0) and pushed significand. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC("fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}


/** Opcode 0xd9 0xf5.
 * FPREM1: IEEE partial remainder of ST0 by ST1, result in ST0. */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC("fprem1 st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
13579
13580
/** Opcode 0xd9 0xf6.
 * FDECSTP: decrement the FPU stack TOP pointer; no register contents change. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC("fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
13603
13604
/** Opcode 0xd9 0xf7.
 * FINCSTP: increment the FPU stack TOP pointer; no register contents change. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC("fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
13627
13628
/** Opcode 0xd9 0xf8.
 * FPREM: partial remainder (truncating) of ST0 by ST1, result in ST0. */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC("fprem st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}


/** Opcode 0xd9 0xf9.
 * FYL2XP1: ST1 := ST1 * log2(ST0 + 1) and pop (destination ST1). */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC("fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}


/** Opcode 0xd9 0xfa.
 * FSQRT: ST0 := sqrt(ST0). */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC("fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}


/** Opcode 0xd9 0xfb.
 * FSINCOS: replaces ST0 with sin(ST0) and pushes cos(ST0). */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC("fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}


/** Opcode 0xd9 0xfc.
 * FRNDINT: round ST0 to integer per the current rounding mode. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC("frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}


/** Opcode 0xd9 0xfd.
 * FSCALE: ST0 := ST0 * 2^trunc(ST1). */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC("fscale st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}


/** Opcode 0xd9 0xfe.
 * FSIN: ST0 := sin(ST0). */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC("fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}


/** Opcode 0xd9 0xff.
 * FCOS: ST0 := cos(ST0). */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC("fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
13691
13692
/** Used by iemOp_EscF1.
 * Dispatch table for 0xd9 with a register-form ModR/M byte in the 0xe0..0xff
 * range; indexed by (bRm - 0xe0). */
static const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */  iemOp_fchs,
    /* 0xe1 */  iemOp_fabs,
    /* 0xe2 */  iemOp_Invalid,
    /* 0xe3 */  iemOp_Invalid,
    /* 0xe4 */  iemOp_ftst,
    /* 0xe5 */  iemOp_fxam,
    /* 0xe6 */  iemOp_Invalid,
    /* 0xe7 */  iemOp_Invalid,
    /* 0xe8 */  iemOp_fld1,
    /* 0xe9 */  iemOp_fldl2t,
    /* 0xea */  iemOp_fldl2e,
    /* 0xeb */  iemOp_fldpi,
    /* 0xec */  iemOp_fldlg2,
    /* 0xed */  iemOp_fldln2,
    /* 0xee */  iemOp_fldz,
    /* 0xef */  iemOp_Invalid,
    /* 0xf0 */  iemOp_f2xm1,
    /* 0xf1 */  iemOp_fylx2,   /* (identifier typo for fyl2x, kept for linkage) */
    /* 0xf2 */  iemOp_fptan,
    /* 0xf3 */  iemOp_fpatan,
    /* 0xf4 */  iemOp_fxtract,
    /* 0xf5 */  iemOp_fprem1,
    /* 0xf6 */  iemOp_fdecstp,
    /* 0xf7 */  iemOp_fincstp,
    /* 0xf8 */  iemOp_fprem,
    /* 0xf9 */  iemOp_fyl2xp1,
    /* 0xfa */  iemOp_fsqrt,
    /* 0xfb */  iemOp_fsincos,
    /* 0xfc */  iemOp_frndint,
    /* 0xfd */  iemOp_fscale,
    /* 0xfe */  iemOp_fsin,
    /* 0xff */  iemOp_fcos
};
13729
13730
/** Opcode 0xd9.
 * Second FPU escape byte: register forms dispatch on the reg field (and, for
 * reg >= 4, on the whole ModR/M byte via g_apfnEscF1_E0toFF); memory forms
 * handle m32real loads/stores and environment/control word accesses. */
FNIEMOP_DEF(iemOp_EscF1)
{
    /* Record the offset of this escape opcode byte for FPU opcode (FOP)
       bookkeeping; presumably consumed when the FPU state is updated. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                if (bRm == 0xc9)
                    return FNIEMOP_CALL(iemOp_fnop);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                /* reg >= 4 with mod == 3 implies bRm is in 0xe0..0xff. */
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r,  bRm);
            case 1: return IEMOP_RAISE_INVALID_OPCODE();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv,    bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw,     bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv,   bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw,    bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13772
13773
/** Opcode 0xda 11/0.
 * FCMOVB ST0,ST(i): copy ST(i) to ST0 when CF is set in EFLAGS (i.e. the
 * integer-flag "below" condition).  Nothing is copied when CF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(i) and ST0 must be non-empty; only ST(i)'s value is needed. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13800
13801
/** Opcode 0xda 11/1.
 * FCMOVE ST0,ST(i): copy ST(i) to ST0 when ZF is set in EFLAGS. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(i) and ST0 must be non-empty; only ST(i)'s value is needed. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13828
13829
/** Opcode 0xda 11/2.
 * FCMOVBE ST0,ST(i): copy ST(i) to ST0 when CF or ZF is set in EFLAGS. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(i) and ST0 must be non-empty; only ST(i)'s value is needed. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13856
13857
/** Opcode 0xda 11/3.
 * FCMOVU ST0,ST(i): copy ST(i) to ST0 when PF is set in EFLAGS (the
 * "unordered" condition after FCOMI-style compares). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(i) and ST0 must be non-empty; only ST(i)'s value is needed. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13884
13885
13886/**
13887 * Common worker for FPU instructions working on ST0 and STn, only affecting
13888 * flags, and popping twice when done.
13889 *
13890 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13891 */
13892FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
13893{
13894 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13895
13896 IEM_MC_BEGIN(3, 1);
13897 IEM_MC_LOCAL(uint16_t, u16Fsw);
13898 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13899 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13900 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
13901
13902 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13903 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13904 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
13905 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
13906 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
13907 IEM_MC_ELSE()
13908 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
13909 IEM_MC_ENDIF();
13910 IEM_MC_USED_FPU();
13911 IEM_MC_ADVANCE_RIP();
13912
13913 IEM_MC_END();
13914 return VINF_SUCCESS;
13915}
13916
13917
/** Opcode 0xda 0xe9. FUCOMPP: unordered compare ST(0) with ST(1), then pop both. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC("fucompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}
13924
13925
13926/**
13927 * Common worker for FPU instructions working on ST0 and an m32i, and storing
13928 * the result in ST0.
13929 *
13930 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13931 */
13932FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
13933{
13934 IEM_MC_BEGIN(3, 3);
13935 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13936 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13937 IEM_MC_LOCAL(int32_t, i32Val2);
13938 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13939 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13940 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
13941
13942 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13943 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13944
13945 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13946 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13947 IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);
13948
13949 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
13950 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
13951 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
13952 IEM_MC_ELSE()
13953 IEM_MC_FPU_STACK_UNDERFLOW(0);
13954 IEM_MC_ENDIF();
13955 IEM_MC_USED_FPU();
13956 IEM_MC_ADVANCE_RIP();
13957
13958 IEM_MC_END();
13959 return VINF_SUCCESS;
13960}
13961
13962
/** Opcode 0xda !11/0. FIADD: add a 32-bit signed integer from memory to ST(0). */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
13969
13970
/** Opcode 0xda !11/1. FIMUL: multiply ST(0) by a 32-bit signed integer from memory. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
13977
13978
/** Opcode 0xda !11/2. FICOM: compare ST(0) with a 32-bit signed integer from memory. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Only the FSW condition flags are updated; no register result. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14011
14012
/** Opcode 0xda !11/3. FICOMP: like FICOM but pops ST(0) afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Same comparison worker as FICOM; the difference is the THEN_POP variants below. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14045
14046
/** Opcode 0xda !11/4. FISUB: subtract a 32-bit signed integer from ST(0). */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
14053
14054
/** Opcode 0xda !11/5. FISUBR: reverse subtract, ST(0) = m32i - ST(0). */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
14061
14062
/** Opcode 0xda !11/6. FIDIV: divide ST(0) by a 32-bit signed integer from memory. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
14069
14070
/** Opcode 0xda !11/7. FIDIVR: reverse divide, ST(0) = m32i / ST(0). */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
14077
14078
/** Opcode 0xda. Escape group: dispatches on ModR/M mod and reg fields. */
FNIEMOP_DEF(iemOp_EscF2)
{
    /* Record where the FPU opcode starts so workers can update FOP/FPUIP. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms: FCMOVcc and FUCOMPP (the single encoding 0xe9). */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5:
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms: 32-bit signed integer operands. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14118
14119
/** Opcode 0xdb !11/0. FILD: load a 32-bit signed integer from memory and push it. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Register 7 (relative to TOP) is the slot the push lands in; it must be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14151
14152
/** Opcode 0xdb !11/1. FISTTP: store ST(0) as a 32-bit integer with truncation, then pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination so the commit can be tied to the FSW outcome. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty register: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14187
14188
/** Opcode 0xdb !11/2. FIST: store ST(0) as a 32-bit integer (rounded per FCW), no pop. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty register: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14223
14224
14225/** Opcode 0xdb !11/3. */
14226FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
14227{
14228 IEMOP_MNEMONIC("fisttp m32i");
14229 IEM_MC_BEGIN(3, 2);
14230 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14231 IEM_MC_LOCAL(uint16_t, u16Fsw);
14232 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14233 IEM_MC_ARG(int32_t *, pi32Dst, 1);
14234 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
14235
14236 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14237 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14238 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14239 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14240
14241 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
14242 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14243 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
14244 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
14245 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
14246 IEM_MC_ELSE()
14247 IEM_MC_IF_FCW_IM()
14248 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
14249 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
14250 IEM_MC_ENDIF();
14251 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
14252 IEM_MC_ENDIF();
14253 IEM_MC_USED_FPU();
14254 IEM_MC_ADVANCE_RIP();
14255
14256 IEM_MC_END();
14257 return VINF_SUCCESS;
14258}
14259
14260
/** Opcode 0xdb !11/5. FLD: load an 80-bit real from memory and push it. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* The push target slot (register 7 relative to TOP) must be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14292
14293
/** Opcode 0xdb !11/7. FSTP: store ST(0) as an 80-bit real to memory, then pop. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination so the commit can be tied to the FSW outcome. */
    IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty register: with IM masked, store negative QNaN (real indefinite). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14328
14329
/** Opcode 0xdb 11/0. FCMOVNB: copy ST(N) to ST(0) if EFLAGS.CF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(0) and ST(N) must be non-empty; otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14356
14357
/** Opcode 0xdb 11/1. FCMOVNE: copy ST(N) to ST(0) if EFLAGS.ZF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(0) and ST(N) must be non-empty; otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14384
14385
/** Opcode 0xdb 11/2. FCMOVNBE: copy ST(N) to ST(0) if both CF and ZF are clear. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(0) and ST(N) must be non-empty; otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14412
14413
/** Opcode 0xdb 11/3. Copy ST(N) to ST(0) if EFLAGS.PF is clear (not unordered).
 * NOTE(review): the Intel mnemonic for DB /3 (reg) is FCMOVNU; the "fcmovnnu"
 * spelling here looks like a typo in the local naming — confirm before renaming. */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnnu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(0) and ST(N) must be non-empty; otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14440
14441
/** Opcode 0xdb 0xe0. FNENI: 8087-only interrupt enable; treated as a no-op here. */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC("fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14453
14454
/** Opcode 0xdb 0xe1. FNDISI: 8087-only interrupt disable; treated as a no-op here. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC("fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14466
14467
/** Opcode 0xdb 0xe2. FNCLEX: clear the FPU exception bits in FSW. */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC("fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14481
14482
/** Opcode 0xdb 0xe3. FNINIT: reinitialize the FPU without checking pending exceptions. */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC("fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
14490
14491
/** Opcode 0xdb 0xe4. FNSETPM: 80287-only; ignored (no-op) on later CPUs. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC("fnsetpm (80287/ign)"); /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14503
14504
/** Opcode 0xdb 0xe5. FRSTPM: 80287XL-only; newer CPUs raise \#UD (active branch below). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC("frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
14520
14521
/** Opcode 0xdb 11/5. FUCOMI: unordered compare ST(0) with ST(N), results in EFLAGS. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}
14528
14529
/** Opcode 0xdb 11/6. FCOMI: ordered compare ST(0) with ST(N), results in EFLAGS. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
14536
14537
14538/** Opcode 0xdb. */
14539FNIEMOP_DEF(iemOp_EscF3)
14540{
14541 pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
14542 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14543 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
14544 {
14545 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14546 {
14547 case 0: FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
14548 case 1: FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
14549 case 2: FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
14550 case 3: FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
14551 case 4:
14552 switch (bRm)
14553 {
14554 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
14555 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
14556 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
14557 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
14558 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
14559 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
14560 case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
14561 case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
14562 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14563 }
14564 break;
14565 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
14566 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
14567 case 7: return IEMOP_RAISE_INVALID_OPCODE();
14568 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14569 }
14570 }
14571 else
14572 {
14573 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14574 {
14575 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
14576 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
14577 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
14578 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
14579 case 4: return IEMOP_RAISE_INVALID_OPCODE();
14580 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
14581 case 6: return IEMOP_RAISE_INVALID_OPCODE();
14582 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
14583 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14584 }
14585 }
14586}
14587
14588
14589/**
14590 * Common worker for FPU instructions working on STn and ST0, and storing the
14591 * result in STn unless IE, DE or ZE was raised.
14592 *
14593 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14594 */
14595FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
14596{
14597 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14598
14599 IEM_MC_BEGIN(3, 1);
14600 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14601 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14602 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14603 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14604
14605 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14606 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14607
14608 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
14609 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
14610 IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
14611 IEM_MC_ELSE()
14612 IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
14613 IEM_MC_ENDIF();
14614 IEM_MC_USED_FPU();
14615 IEM_MC_ADVANCE_RIP();
14616
14617 IEM_MC_END();
14618 return VINF_SUCCESS;
14619}
14620
14621
/** Opcode 0xdc 11/0. FADD ST(N),ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
14628
14629
/** Opcode 0xdc 11/1. FMUL ST(N),ST(0). */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
14636
14637
/** Opcode 0xdc 11/4. FSUBR ST(N),ST(0). (Note the /4-/5 sub/subr swap vs 0xd8.) */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
14644
14645
/** Opcode 0xdc 11/5. FSUB ST(N),ST(0). */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
14652
14653
/** Opcode 0xdc 11/6. FDIVR ST(N),ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
14660
14661
/** Opcode 0xdc 11/7. FDIV ST(N),ST(0). */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
14668
14669
14670/**
14671 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
14672 * memory operand, and storing the result in ST0.
14673 *
14674 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14675 */
14676FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
14677{
14678 IEM_MC_BEGIN(3, 3);
14679 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14680 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14681 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
14682 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14683 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
14684 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
14685
14686 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14687 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14688 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14689 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14690
14691 IEM_MC_FETCH_MEM_R64(r64Factor2, pIemCpu->iEffSeg, GCPtrEffSrc);
14692 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
14693 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
14694 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pIemCpu->iEffSeg, GCPtrEffSrc);
14695 IEM_MC_ELSE()
14696 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pIemCpu->iEffSeg, GCPtrEffSrc);
14697 IEM_MC_ENDIF();
14698 IEM_MC_USED_FPU();
14699 IEM_MC_ADVANCE_RIP();
14700
14701 IEM_MC_END();
14702 return VINF_SUCCESS;
14703}
14704
14705
/** Opcode 0xdc !11/0. FADD ST0,m64r: add a 64-bit real from memory to ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
14712
14713
/** Opcode 0xdc !11/1. FMUL ST0,m64r: multiply ST(0) by a 64-bit real from memory. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
14720
14721
/** Opcode 0xdc !11/2. FCOM: compare ST(0) with a 64-bit real from memory. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Only the FSW condition flags are updated; no register result. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14754
14755
/** Opcode 0xdc !11/3. FCOMP: like FCOM but pops ST(0) afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Same comparison worker as FCOM; the difference is the THEN_POP variants below. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14788
14789
/** Opcode 0xdc !11/4. FSUB ST0,m64r: subtract a 64-bit real from ST(0). */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
14796
14797
/** Opcode 0xdc !11/5. FSUBR ST0,m64r: reverse subtract, ST(0) = m64r - ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
14804
14805
/** Opcode 0xdc !11/6. FDIV ST0,m64r: divide ST(0) by a 64-bit real from memory. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
14812
14813
/** Opcode 0xdc !11/7. FDIVR ST0,m64r: reverse divide, ST(0) = m64r / ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
14820
14821
/** Opcode 0xdc. Escape group: dispatches on ModR/M mod and reg fields. */
FNIEMOP_DEF(iemOp_EscF4)
{
    /* Record where the FPU opcode starts so workers can update FOP/FPUIP. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms: ST(N) is the destination (reverse of the 0xd8 group). */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms: 64-bit real operands. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14858
14859
/** Opcode 0xdd !11/0.
 * FLD m64r: convert the 64-bit real memory operand to 80-bit and push it
 * onto the FPU stack; signals stack overflow if the incoming slot is taken.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,            FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U,              r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT,   pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,    pr64Val,    r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pIemCpu->iEffSeg, GCPtrEffSrc);
    /* Register 7 is the slot that becomes ST(0) after the push; it must be
       empty or we have a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14891
14892
/** Opcode 0xdd !11/1.
 * FISTTP m64i (SSE3): store ST(0) to the 64-bit integer memory operand using
 * truncation (chop) rounding regardless of FCW.RC, then pop the stack.  On
 * stack underflow with IM masked, the integer indefinite (INT64_MIN) is
 * stored instead. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,                u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,      pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int64_t *,                 pi64Dst,    1);
    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value,  2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable before touching FPU state so memory faults
       are raised ahead of any FPU stack changes. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14927
14928
/** Opcode 0xdd !11/2.
 * FST m64r: store ST(0) to the 64-bit real memory operand; the stack is not
 * popped.  On underflow with IM masked a negative QNaN is stored. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,                u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,      pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U,               pr64Dst,    1);
    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value,  2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14963
14964
14965
14966
/** Opcode 0xdd !11/3.
 * FSTP m64r: store ST(0) to the 64-bit real memory operand, then pop the
 * stack.  Identical to FST m64r except for the pop. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,                u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,      pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U,               pr64Dst,    1);
    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value,  2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15001
15002
15003/** Opcode 0xdd !11/0. */
15004FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
15005{
15006 IEMOP_MNEMONIC("fxrstor m94/108byte");
15007 IEM_MC_BEGIN(3, 0);
15008 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
15009 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pIemCpu->iEffSeg, 1);
15010 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
15011 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15012 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15013 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15014 IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
15015 IEM_MC_END();
15016 return VINF_SUCCESS;
15017}
15018
15019
/** Opcode 0xdd !11/6.
 * FNSAVE m94/108byte: save the full FPU state to memory (layout depends on
 * the effective operand size) without a preceding wait; deferred to
 * iemCImpl_fnsave. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize,  0);
    IEM_MC_ARG_CONST(uint8_t,           iEffSeg,      /*=*/ pIemCpu->iEffSeg,       1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffDst,                                2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;

}
15036
/** Opcode 0xdd !11/7.
 * FNSTSW m16: store the FPU status word to the 16-bit memory operand without
 * a preceding wait. */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}
15060
15061
/** Opcode 0xdd 11/0.
 * FFREE ST(i): mark the given FPU data register as empty in the tag word;
 * the register content and the stack top are left untouched. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
       unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15083
15084
/** Opcode 0xdd 11/2.
 * FST ST(i): copy ST(0) into ST(i) without popping; underflow is raised when
 * ST(0) is empty. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        /* FSW is zero here: the copy itself raises no exceptions. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15107
15108
15109/** Opcode 0xdd 11/3. */
15110FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
15111{
15112 IEMOP_MNEMONIC("fcom st0,stN");
15113 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
15114}
15115
15116
15117/** Opcode 0xdd 11/4. */
15118FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
15119{
15120 IEMOP_MNEMONIC("fcomp st0,stN");
15121 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
15122}
15123
15124
/** Opcode 0xdd.
 * FPU escape 0xdd: register form covers FFREE/FST/FSTP/FUCOM(P); memory form
 * covers 64-bit real loads/stores, FISTTP and the FRSTOR/FNSAVE/FNSTSW state
 * instructions. */
FNIEMOP_DEF(iemOp_EscF5)
{
    /* Record the FPU opcode (FOP) position before consuming ModR/M. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN,    bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN,     bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN,    bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN,  bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r,    bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r,    bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r,   bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor,      bRm);
            case 5: return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave,      bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw,      bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15161
15162
/** Opcode 0xde 11/0.
 * FADDP ST(i),ST(0): ST(i) += ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xde 11/1.
 * FMULP ST(i),ST(0): ST(i) *= ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
15177
15178
15179/** Opcode 0xde 0xd9. */
15180FNIEMOP_DEF(iemOp_fcompp)
15181{
15182 IEMOP_MNEMONIC("fucompp st0,stN");
15183 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
15184}
15185
15186
/** Opcode 0xde 11/4.
 * FSUBRP ST(i),ST(0): ST(i) = ST(0) - ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xde 11/5.
 * FSUBP ST(i),ST(0): ST(i) = ST(i) - ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xde 11/6.
 * FDIVRP ST(i),ST(0): ST(i) = ST(0) / ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}


/** Opcode 0xde 11/7.
 * FDIVP ST(i),ST(0): ST(i) = ST(i) / ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
15217
15218
15219/**
15220 * Common worker for FPU instructions working on ST0 and an m16i, and storing
15221 * the result in ST0.
15222 *
15223 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15224 */
15225FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
15226{
15227 IEM_MC_BEGIN(3, 3);
15228 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15229 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15230 IEM_MC_LOCAL(int16_t, i16Val2);
15231 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15232 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15233 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
15234
15235 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15236 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15237
15238 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15239 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15240 IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);
15241
15242 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
15243 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
15244 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
15245 IEM_MC_ELSE()
15246 IEM_MC_FPU_STACK_UNDERFLOW(0);
15247 IEM_MC_ENDIF();
15248 IEM_MC_USED_FPU();
15249 IEM_MC_ADVANCE_RIP();
15250
15251 IEM_MC_END();
15252 return VINF_SUCCESS;
15253}
15254
15255
/** Opcode 0xde !11/0.
 * FIADD m16i: ST(0) += (signed 16-bit integer memory operand). */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}


/** Opcode 0xde !11/1.
 * FIMUL m16i: ST(0) *= (signed 16-bit integer memory operand). */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
15270
15271
/** Opcode 0xde !11/2.
 * FICOM m16i: compare ST(0) with the signed 16-bit integer memory operand;
 * updates FSW condition codes only, no pop. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int16_t,               i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15304
15305
/** Opcode 0xde !11/3.
 * FICOMP m16i: like FICOM m16i but pops the FPU stack afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int16_t,               i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15338
15339
/** Opcode 0xde !11/4.
 * FISUB m16i: ST(0) -= (signed 16-bit integer memory operand). */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}


/** Opcode 0xde !11/5.
 * FISUBR m16i: ST(0) = m16i - ST(0) (reversed operand order). */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
15354
15355
15356/** Opcode 0xde !11/6. */
15357FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
15358{
15359 IEMOP_MNEMONIC("fiadd m16i");
15360 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
15361}
15362
15363
15364/** Opcode 0xde !11/7. */
15365FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
15366{
15367 IEMOP_MNEMONIC("fiadd m16i");
15368 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
15369}
15370
15371
/** Opcode 0xde.
 * FPU escape 0xde: register form is the pop-variant arithmetic (FADDP etc.,
 * plus FCOMPP at 0xd9); memory form is arithmetic with a signed 16-bit
 * integer operand. */
FNIEMOP_DEF(iemOp_EscF6)
{
    /* Record the FPU opcode (FOP) position before consuming ModR/M. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9)                 /* only DE D9 is FCOMPP */
                        return FNIEMOP_CALL(iemOp_fcompp);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15410
15411
/** Opcode 0xdf 11/0.
 * FFREEP ST(i): undocumented instruction, assumed to work like FFREE
 * followed by FINCSTP (free the register, then pop by incrementing TOP). */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15433
15434
/** Opcode 0xdf 0xe0.
 * FNSTSW AX: copy the FPU status word into AX without a preceding wait. */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC("fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15450
15451
15452/** Opcode 0xdf 11/5. */
15453FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
15454{
15455 IEMOP_MNEMONIC("fcomip st0,stN");
15456 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
15457}
15458
15459
/** Opcode 0xdf 11/6.
 * FCOMIP ST(0),ST(i): ordered compare setting EFLAGS (ZF/PF/CF), then pop. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}


/** Opcode 0xdf !11/0.
 * FILD m16i — not yet implemented (stub raises/asserts per FNIEMOP_STUB_1). */
FNIEMOP_STUB_1(iemOp_fild_m16i, uint8_t, bRm);
15470
15471
/** Opcode 0xdf !11/1.
 * FISTTP m16i (SSE3): store ST(0) as a signed 16-bit integer using
 * truncation rounding, then pop.  On underflow with IM masked, the 16-bit
 * integer indefinite (INT16_MIN) is stored. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,                u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,      pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int16_t *,                 pi16Dst,    1);
    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value,  2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15506
15507
15508/** Opcode 0xdf !11/2. */
15509FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
15510{
15511 IEMOP_MNEMONIC("fistp m16i");
15512 IEM_MC_BEGIN(3, 2);
15513 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15514 IEM_MC_LOCAL(uint16_t, u16Fsw);
15515 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15516 IEM_MC_ARG(int16_t *, pi16Dst, 1);
15517 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15518
15519 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15520 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15521 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15522 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15523
15524 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
15525 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15526 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
15527 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
15528 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
15529 IEM_MC_ELSE()
15530 IEM_MC_IF_FCW_IM()
15531 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
15532 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
15533 IEM_MC_ENDIF();
15534 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
15535 IEM_MC_ENDIF();
15536 IEM_MC_USED_FPU();
15537 IEM_MC_ADVANCE_RIP();
15538
15539 IEM_MC_END();
15540 return VINF_SUCCESS;
15541}
15542
15543
/** Opcode 0xdf !11/3.
 * FISTP m16i: store ST(0) as a signed 16-bit integer (rounded per FCW.RC),
 * then pop the stack.  On underflow with IM masked, INT16_MIN (integer
 * indefinite) is stored. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,                u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,      pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int16_t *,                 pi16Dst,    1);
    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value,  2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15578
15579
/** Opcode 0xdf !11/4.
 * FBLD m80bcd — not yet implemented (stub). */
FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);

/** Opcode 0xdf !11/5.
 * FILD m64i — not yet implemented (stub). */
FNIEMOP_STUB_1(iemOp_fild_m64i, uint8_t, bRm);

/** Opcode 0xdf !11/6.
 * FBSTP m80bcd — not yet implemented (stub). */
FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
15588
15589
/** Opcode 0xdf !11/7.
 * FISTP m64i: store ST(0) as a signed 64-bit integer (rounded per FCW.RC),
 * then pop the stack.  On underflow with IM masked, INT64_MIN (integer
 * indefinite) is stored. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,                u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,      pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int64_t *,                 pi64Dst,    1);
    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value,  2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15624
15625
/** Opcode 0xdf.
 * FPU escape 0xdf: register form covers FFREEP/FNSTSW AX/FUCOMIP/FCOMIP
 * (plus reserved slots behaving like FXCH/FSTP on intel); memory form covers
 * 16-bit integer and BCD loads/stores plus FILD/FISTP m64i. */
FNIEMOP_DEF(iemOp_EscF7)
{
    /* NOTE(review): unlike iemOp_EscF4/F5/F6, this handler does not record
       pIemCpu->offFpuOpcode before fetching the ModR/M byte — confirm the
       omission is intentional (FOP would otherwise be stale for 0xdf
       instructions). */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 4: if (bRm == 0xe0)                 /* only DF E0 is FNSTSW AX */
                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i,   bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i,  bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d,   bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i,   bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15663
15664
/** Opcode 0xe0.
 * LOOPNE/LOOPNZ Jb: decrement CX/ECX/RCX (width per effective address size);
 * branch by the signed 8-bit displacement while the counter is non-zero AND
 * ZF is clear.  The counter decrement does not affect EFLAGS. */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC("loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
15711
15712
/** Opcode 0xe1.
 * LOOPE/LOOPZ Jb: decrement CX/ECX/RCX (width per effective address size);
 * branch by the signed 8-bit displacement while the counter is non-zero AND
 * ZF is set.  The counter decrement does not affect EFLAGS. */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC("loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
15759
15760
/** Opcode 0xe2.
 * LOOP Jb: decrement CX/ECX/RCX (width per effective address size) and
 * branch by the signed 8-bit displacement while the counter is non-zero.
 * A "loop $" that jumps back onto itself (displacement == minus the
 * instruction length) is short-circuited by clearing the counter and
 * falling through, instead of iterating the busy-wait one step at a time. */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC("loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            /* offOpcode is the instruction length here; i8Imm == -length
               means the branch target is this very instruction. */
            if (-(int8_t)pIemCpu->offOpcode != i8Imm)
            {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_IF_CX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* Self-loop: emulate the whole countdown at once. */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)pIemCpu->offOpcode != i8Imm)
            {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_IF_ECX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)pIemCpu->offOpcode != i8Imm)
            {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_IF_RCX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
15834
15835
/**
 * Opcode 0xe3 - JCXZ/JECXZ/JRCXZ Jb.
 *
 * Takes the relative 8-bit jump if rCX (width selected by the effective
 * address size) is zero; does not modify rCX or EFLAGS.
 */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC("jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            /* Note the inverted sense: advance when CX is non-zero, jump otherwise. */
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
15879
15880
15881/** Opcode 0xe4 */
15882FNIEMOP_DEF(iemOp_in_AL_Ib)
15883{
15884 IEMOP_MNEMONIC("in eAX,Ib");
15885 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
15886 IEMOP_HLP_NO_LOCK_PREFIX();
15887 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
15888}
15889
15890
/**
 * Opcode 0xe5 - IN eAX,Ib.
 *
 * Reads a word or dword (per effective operand size) from the
 * immediate-addressed I/O port into AX/EAX via iemCImpl_in.
 */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC("in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* 64-bit operand size also accesses 4 bytes (no 8-byte port I/O). */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
15899
15900
/**
 * Opcode 0xe6 - OUT Ib,AL.
 *
 * Writes AL to the immediate-addressed I/O port via iemCImpl_out.
 */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC("out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
}
15909
15910
/**
 * Opcode 0xe7 - OUT Ib,eAX.
 *
 * Writes AX/EAX (per effective operand size) to the immediate-addressed
 * I/O port via iemCImpl_out.
 */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC("out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* 64-bit operand size also accesses 4 bytes (no 8-byte port I/O). */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
15919
15920
/**
 * Opcode 0xe8 - CALL rel16/rel32.
 *
 * Near relative call; the displacement is sign-extended and the push/branch
 * work is done by the iemCImpl_call_rel_NN workers.  In 64-bit mode the
 * displacement is always a sign-extended rel32.
 */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC("call Jv");
    /* NOTE(review): no IEMOP_HLP_NO_LOCK_PREFIX here, unlike the other
       branch instructions in this range - confirm whether the lock check is
       handled elsewhere for this opcode. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            /* rel32, sign-extended to 64 bits at fetch time. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
15949
15950
/**
 * Opcode 0xe9 - JMP rel16/rel32.
 *
 * Near relative jump.  The 64-bit case shares the 32-bit decode path since
 * the displacement is a rel32 in both cases (IEM_MC_REL_JMP_S32 handles the
 * effective operand size).
 */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC("jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S16(i16Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:   /* fall through - rel32 in 64-bit mode too */
        case IEMMODE_32BIT:
        {
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S32(i32Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
15980
15981
/**
 * Opcode 0xea - JMP ptr16:16 / ptr16:32 (far jump, direct).
 *
 * Invalid in 64-bit mode.  Decodes the far pointer (offset then selector)
 * and defers to iemCImpl_FarJmp for the segment checks and branch.
 */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC("jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pIemCpu->enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pIemCpu->enmEffOpSize);
}
15998
15999
/**
 * Opcode 0xeb - JMP rel8 (short jump).
 */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC("jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_REL_JMP_S8(i8Imm);
    IEM_MC_END();
    return VINF_SUCCESS;
}
16013
16014
/**
 * Opcode 0xec - IN AL,DX.
 *
 * Reads one byte from the port addressed by DX into AL via iemCImpl_in_eAX_DX.
 */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC("in AL,DX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}
16022
16023
/**
 * Opcode 0xed - IN eAX,DX.
 *
 * Reads a word or dword (per effective operand size) from the port
 * addressed by DX into AX/EAX via iemCImpl_in_eAX_DX.
 *
 * NOTE(review): function name is missing the 'in_' part (should match
 * iemOp_in_AL_DX above); renaming requires touching the opcode table, so
 * only flagging it here.
 */
FNIEMOP_DEF(iemOp_eAX_DX)
{
    IEMOP_MNEMONIC("in eAX,DX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16031
16032
/**
 * Opcode 0xee - OUT DX,AL.
 *
 * Writes AL to the port addressed by DX via iemCImpl_out_DX_eAX.
 */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC("out DX,AL");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}
16040
16041
/**
 * Opcode 0xef - OUT DX,eAX.
 *
 * Writes AX/EAX (per effective operand size) to the port addressed by DX
 * via iemCImpl_out_DX_eAX.
 */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC("out DX,eAX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16049
16050
/**
 * Opcode 0xf0 - LOCK prefix.
 *
 * Records the prefix and recursively decodes the next opcode byte through
 * the one-byte opcode table.
 */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    pIemCpu->fPrefixes |= IEM_OP_PRF_LOCK;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16060
16061
/**
 * Opcode 0xf2 - REPNE/REPNZ prefix.
 *
 * Records the prefix (cancelling any earlier REPE) and recursively decodes
 * the next opcode byte through the one-byte opcode table.
 */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pIemCpu->fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pIemCpu->fPrefixes |= IEM_OP_PRF_REPNZ;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16073
16074
/**
 * Opcode 0xf3 - REP/REPE/REPZ prefix.
 *
 * Records the prefix (cancelling any earlier REPNE) and recursively decodes
 * the next opcode byte through the one-byte opcode table.
 */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pIemCpu->fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pIemCpu->fPrefixes |= IEM_OP_PRF_REPZ;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16086
16087
16088/** Opcode 0xf4. */
16089FNIEMOP_DEF(iemOp_hlt)
16090{
16091 IEMOP_HLP_NO_LOCK_PREFIX();
16092 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
16093}
16094
16095
/**
 * Opcode 0xf5 - CMC (complement carry flag).
 */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC("cmc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16107
16108
16109/**
16110 * Common implementation of 'inc/dec/not/neg Eb'.
16111 *
16112 * @param bRm The RM byte.
16113 * @param pImpl The instruction implementation.
16114 */
16115FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
16116{
16117 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16118 {
16119 /* register access */
16120 IEM_MC_BEGIN(2, 0);
16121 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
16122 IEM_MC_ARG(uint32_t *, pEFlags, 1);
16123 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
16124 IEM_MC_REF_EFLAGS(pEFlags);
16125 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
16126 IEM_MC_ADVANCE_RIP();
16127 IEM_MC_END();
16128 }
16129 else
16130 {
16131 /* memory access. */
16132 IEM_MC_BEGIN(2, 2);
16133 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
16134 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
16135 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16136
16137 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16138 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
16139 IEM_MC_FETCH_EFLAGS(EFlags);
16140 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
16141 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
16142 else
16143 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);
16144
16145 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
16146 IEM_MC_COMMIT_EFLAGS(EFlags);
16147 IEM_MC_ADVANCE_RIP();
16148 IEM_MC_END();
16149 }
16150 return VINF_SUCCESS;
16151}
16152
16153
16154/**
16155 * Common implementation of 'inc/dec/not/neg Ev'.
16156 *
16157 * @param bRm The RM byte.
16158 * @param pImpl The instruction implementation.
16159 */
16160FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
16161{
16162 /* Registers are handled by a common worker. */
16163 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16164 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
16165
16166 /* Memory we do here. */
16167 switch (pIemCpu->enmEffOpSize)
16168 {
16169 case IEMMODE_16BIT:
16170 IEM_MC_BEGIN(2, 2);
16171 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
16172 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
16173 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16174
16175 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16176 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
16177 IEM_MC_FETCH_EFLAGS(EFlags);
16178 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
16179 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
16180 else
16181 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);
16182
16183 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
16184 IEM_MC_COMMIT_EFLAGS(EFlags);
16185 IEM_MC_ADVANCE_RIP();
16186 IEM_MC_END();
16187 return VINF_SUCCESS;
16188
16189 case IEMMODE_32BIT:
16190 IEM_MC_BEGIN(2, 2);
16191 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
16192 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
16193 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16194
16195 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16196 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
16197 IEM_MC_FETCH_EFLAGS(EFlags);
16198 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
16199 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
16200 else
16201 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);
16202
16203 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
16204 IEM_MC_COMMIT_EFLAGS(EFlags);
16205 IEM_MC_ADVANCE_RIP();
16206 IEM_MC_END();
16207 return VINF_SUCCESS;
16208
16209 case IEMMODE_64BIT:
16210 IEM_MC_BEGIN(2, 2);
16211 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
16212 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
16213 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16214
16215 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16216 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
16217 IEM_MC_FETCH_EFLAGS(EFlags);
16218 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
16219 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
16220 else
16221 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);
16222
16223 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
16224 IEM_MC_COMMIT_EFLAGS(EFlags);
16225 IEM_MC_ADVANCE_RIP();
16226 IEM_MC_END();
16227 return VINF_SUCCESS;
16228
16229 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16230 }
16231}
16232
16233
/**
 * Opcode 0xf6 /0 - TEST Eb,Ib.
 *
 * AND without writeback: only EFLAGS are updated, so the memory operand is
 * mapped read-only.  AF is left undefined per the instruction spec.
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC("test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/u8Imm,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,          0);
        IEM_MC_ARG(uint8_t,         u8Src,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

        /* The immediate follows the ModR/M bytes, hence the 1 byte "extra
           opcode bytes" argument to the effective address calculation. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
16281
16282
/**
 * Opcode 0xf7 /0 - TEST Ev,Iz.
 *
 * AND without writeback: only EFLAGS are updated, so memory operands are
 * mapped read-only.  In 64-bit mode the immediate is a sign-extended imm32.
 * AF is left undefined per the instruction spec.
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("test Ev,Iv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/u16Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/u32Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/u64Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,         0);
                IEM_MC_ARG(uint16_t,        u16Src,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                /* 2 extra opcode bytes: the imm16 follows the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,         0);
                IEM_MC_ARG(uint32_t,        u32Src,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                /* 4 extra opcode bytes: the imm32 follows the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,         0);
                IEM_MC_ARG(uint64_t,        u64Src,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                /* 4 extra opcode bytes: imm32 (sign-extended) follows the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
16419
16420
/**
 * Common worker for opcode 0xf6 /4, /5, /6 and /7 (mul/imul/div/idiv Eb).
 *
 * The byte-sized multiply/divide operates on AX (implicit accumulator); the
 * assembly worker returns non-zero on divide error, which is translated to
 * a \#DE.
 *
 * @param   bRm     The RM byte.
 * @param   pfnU8   The byte-sized assembly worker to call.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_NO_LOCK_PREFIX(); /* NOTE(review): redundant - already done above. */
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *,      pu16AX,     0);
        IEM_MC_ARG(uint8_t,         u8Value,    1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);
        IEM_MC_LOCAL(int32_t,       rc);

        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* Non-zero return from the worker signals a divide error. */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *,      pu16AX,     0);
        IEM_MC_ARG(uint8_t,         u8Value,    1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
        IEM_MC_LOCAL(int32_t,       rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U8(u8Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
16475
16476
/**
 * Common worker for opcode 0xf7 /4, /5, /6 and /7 (mul/imul/div/idiv Ev).
 *
 * Operates on the implicit rDX:rAX register pair; the assembly worker
 * returns non-zero on divide error, which is translated to a \#DE.  For the
 * 32-bit forms the high dwords of RAX/RDX are cleared explicitly on success.
 *
 * @param   bRm     The RM byte.
 * @param   pImpl   Size-specific assembly workers (pfnU16/pfnU32/pfnU64).
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
{
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint16_t *,      pu16AX,     0);
                IEM_MC_ARG(uint16_t *,      pu16DX,     1);
                IEM_MC_ARG(uint16_t,        u16Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                /* Non-zero return from the worker signals a divide error. */
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint32_t *,      pu32AX,     0);
                IEM_MC_ARG(uint32_t *,      pu32DX,     1);
                IEM_MC_ARG(uint32_t,        u32Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit register writes zero-extend to 64 bits. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint64_t *,      pu64AX,     0);
                IEM_MC_ARG(uint64_t *,      pu64DX,     1);
                IEM_MC_ARG(uint64_t,        u64Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *,      pu16AX,     0);
                IEM_MC_ARG(uint16_t *,      pu16DX,     1);
                IEM_MC_ARG(uint16_t,        u16Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *,      pu32AX,     0);
                IEM_MC_ARG(uint32_t *,      pu32DX,     1);
                IEM_MC_ARG(uint32_t,        u32Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit register writes zero-extend to 64 bits. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *,      pu64AX,     0);
                IEM_MC_ARG(uint64_t *,      pu64DX,     1);
                IEM_MC_ARG(uint64_t,        u64Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
16660
/**
 * Opcode 0xf6 - Group 3, byte operands.
 *
 * Dispatches on the ModR/M reg field: /0 test, /1 invalid, /2 not, /3 neg,
 * /4 mul, /5 imul, /6 div, /7 idiv.
 */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1:
            /* /1 is undefined for group 3. */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC("not Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC("neg Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC("mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
        case 5:
            IEMOP_MNEMONIC("imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
        case 6:
            IEMOP_MNEMONIC("div Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
        case 7:
            IEMOP_MNEMONIC("idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16696
16697
/**
 * Opcode 0xf7 - Group 3, word/dword/qword operands.
 *
 * Dispatches on the ModR/M reg field: /0 test, /1 invalid, /2 not, /3 neg,
 * /4 mul, /5 imul, /6 div, /7 idiv.
 */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1:
            /* /1 is undefined for group 3. */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC("not Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC("neg Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC("mul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
        case 5:
            IEMOP_MNEMONIC("imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
        case 6:
            IEMOP_MNEMONIC("div Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
        case 7:
            IEMOP_MNEMONIC("idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16733
16734
/**
 * Opcode 0xf8 - CLC (clear carry flag).
 */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC("clc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16746
16747
/**
 * Opcode 0xf9 - STC (set carry flag).
 */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC("stc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16759
16760
/**
 * Opcode 0xfa - CLI.
 *
 * Defers to iemCImpl_cli for IOPL/VME checking and the IF update.
 */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC("cli");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
}
16768
16769
/** Opcode 0xfb - STI; defers to iemCImpl_sti. */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC("sti");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
16776
16777
/**
 * Opcode 0xfc - CLD (clear direction flag).
 */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC("cld");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16789
16790
/**
 * Opcode 0xfd - STD (set direction flag).
 */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC("std");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16802
16803
16804/** Opcode 0xfe. */
16805FNIEMOP_DEF(iemOp_Grp4)
16806{
16807 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
16808 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16809 {
16810 case 0:
16811 IEMOP_MNEMONIC("inc Ev");
16812 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
16813 case 1:
16814 IEMOP_MNEMONIC("dec Ev");
16815 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
16816 default:
16817 IEMOP_MNEMONIC("grp4-ud");
16818 return IEMOP_RAISE_INVALID_OPCODE();
16819 }
16820}
16821
16822
/**
 * Opcode 0xff /2 - near indirect call.
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("calln Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                /* The C worker pushes the return address and updates RIP. */
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. (The old comment said
           "register" here - copy&paste from the branch above.) */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
16904
16905typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
16906
16907FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
16908{
16909 /* Registers? How?? */
16910 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16911 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
16912
16913 /* Far pointer loaded from memory. */
16914 switch (pIemCpu->enmEffOpSize)
16915 {
16916 case IEMMODE_16BIT:
16917 IEM_MC_BEGIN(3, 1);
16918 IEM_MC_ARG(uint16_t, u16Sel, 0);
16919 IEM_MC_ARG(uint16_t, offSeg, 1);
16920 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
16921 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16922 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16923 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16924 IEM_MC_FETCH_MEM_U16(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
16925 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 2);
16926 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
16927 IEM_MC_END();
16928 return VINF_SUCCESS;
16929
16930 case IEMMODE_64BIT:
16931 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
16932 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
16933 * and call far qword [rsp] encodings. */
16934 if (!IEM_IS_GUEST_CPU_AMD(pIemCpu))
16935 {
16936 IEM_MC_BEGIN(3, 1);
16937 IEM_MC_ARG(uint16_t, u16Sel, 0);
16938 IEM_MC_ARG(uint64_t, offSeg, 1);
16939 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
16940 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16941 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16942 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16943 IEM_MC_FETCH_MEM_U64(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
16944 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 8);
16945 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
16946 IEM_MC_END();
16947 return VINF_SUCCESS;
16948 }
16949 /* AMD falls thru. */
16950
16951 case IEMMODE_32BIT:
16952 IEM_MC_BEGIN(3, 1);
16953 IEM_MC_ARG(uint16_t, u16Sel, 0);
16954 IEM_MC_ARG(uint32_t, offSeg, 1);
16955 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
16956 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
16957 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
16958 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
16959 IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
16960 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 4);
16961 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
16962 IEM_MC_END();
16963 return VINF_SUCCESS;
16964
16965 IEM_NOT_REACHED_DEFAULT_CASE_RET();
16966 }
16967}
16968
16969
/**
 * Opcode 0xff /3 - far indirect call through a memory far pointer.
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC("callf Ep");
    /* Decoding is shared with jmpf (/5); only the C worker differs. */
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
}
16979
16980
/**
 * Opcode 0xff /4 - near indirect jump.
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("jmpn Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                /* SET_RIP also performs the canonical/limit checks. -- NOTE(review):
                   inferred from the macro replacing ADVANCE_RIP here; confirm. */
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
17062
17063
/**
 * Opcode 0xff /5 - far indirect jump through a memory far pointer.
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC("jmpf Ep");
    /* Decoding is shared with callf (/3); only the C worker differs. */
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
}
17073
17074
/**
 * Opcode 0xff /6 - push word/dword/qword from register or memory.
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("push Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */

    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

    /* Memory we do here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_FETCH_MEM_U16(u16Src, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_FETCH_MEM_U32(u32Src, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_FETCH_MEM_U64(u64Src, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17128
17129
/** Opcode 0xff.
 * Group 5: ModR/M.reg selects the actual instruction. */
FNIEMOP_DEF(iemOp_Grp5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC("inc Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC("dec Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
        case 2: /* near indirect call */
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3: /* far indirect call */
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4: /* near indirect jump */
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5: /* far indirect jump */
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6: /* push */
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7: /* /7 is undefined */
            IEMOP_MNEMONIC("grp5-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    /* All 8 /reg values are handled above; getting here is a decoder bug. */
    AssertFailedReturn(VERR_INTERNAL_ERROR_2);
}
17158
17159
17160
/**
 * The one-byte opcode dispatch table, indexed by the opcode byte.
 * Forward declared at the top of this file so earlier code can reference it.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */  iemOp_add_Eb_Gb,        iemOp_add_Ev_Gv,        iemOp_add_Gb_Eb,        iemOp_add_Gv_Ev,
    /* 0x04 */  iemOp_add_Al_Ib,        iemOp_add_eAX_Iz,       iemOp_push_ES,          iemOp_pop_ES,
    /* 0x08 */  iemOp_or_Eb_Gb,         iemOp_or_Ev_Gv,         iemOp_or_Gb_Eb,         iemOp_or_Gv_Ev,
    /* 0x0c */  iemOp_or_Al_Ib,         iemOp_or_eAX_Iz,        iemOp_push_CS,          iemOp_2byteEscape,
    /* 0x10 */  iemOp_adc_Eb_Gb,        iemOp_adc_Ev_Gv,        iemOp_adc_Gb_Eb,        iemOp_adc_Gv_Ev,
    /* 0x14 */  iemOp_adc_Al_Ib,        iemOp_adc_eAX_Iz,       iemOp_push_SS,          iemOp_pop_SS,
    /* 0x18 */  iemOp_sbb_Eb_Gb,        iemOp_sbb_Ev_Gv,        iemOp_sbb_Gb_Eb,        iemOp_sbb_Gv_Ev,
    /* 0x1c */  iemOp_sbb_Al_Ib,        iemOp_sbb_eAX_Iz,       iemOp_push_DS,          iemOp_pop_DS,
    /* 0x20 */  iemOp_and_Eb_Gb,        iemOp_and_Ev_Gv,        iemOp_and_Gb_Eb,        iemOp_and_Gv_Ev,
    /* 0x24 */  iemOp_and_Al_Ib,        iemOp_and_eAX_Iz,       iemOp_seg_ES,           iemOp_daa,
    /* 0x28 */  iemOp_sub_Eb_Gb,        iemOp_sub_Ev_Gv,        iemOp_sub_Gb_Eb,        iemOp_sub_Gv_Ev,
    /* 0x2c */  iemOp_sub_Al_Ib,        iemOp_sub_eAX_Iz,       iemOp_seg_CS,           iemOp_das,
    /* 0x30 */  iemOp_xor_Eb_Gb,        iemOp_xor_Ev_Gv,        iemOp_xor_Gb_Eb,        iemOp_xor_Gv_Ev,
    /* 0x34 */  iemOp_xor_Al_Ib,        iemOp_xor_eAX_Iz,       iemOp_seg_SS,           iemOp_aaa,
    /* 0x38 */  iemOp_cmp_Eb_Gb,        iemOp_cmp_Ev_Gv,        iemOp_cmp_Gb_Eb,        iemOp_cmp_Gv_Ev,
    /* 0x3c */  iemOp_cmp_Al_Ib,        iemOp_cmp_eAX_Iz,       iemOp_seg_DS,           iemOp_aas,
    /* 0x40 */  iemOp_inc_eAX,          iemOp_inc_eCX,          iemOp_inc_eDX,          iemOp_inc_eBX,
    /* 0x44 */  iemOp_inc_eSP,          iemOp_inc_eBP,          iemOp_inc_eSI,          iemOp_inc_eDI,
    /* 0x48 */  iemOp_dec_eAX,          iemOp_dec_eCX,          iemOp_dec_eDX,          iemOp_dec_eBX,
    /* 0x4c */  iemOp_dec_eSP,          iemOp_dec_eBP,          iemOp_dec_eSI,          iemOp_dec_eDI,
    /* 0x50 */  iemOp_push_eAX,         iemOp_push_eCX,         iemOp_push_eDX,         iemOp_push_eBX,
    /* 0x54 */  iemOp_push_eSP,         iemOp_push_eBP,         iemOp_push_eSI,         iemOp_push_eDI,
    /* 0x58 */  iemOp_pop_eAX,          iemOp_pop_eCX,          iemOp_pop_eDX,          iemOp_pop_eBX,
    /* 0x5c */  iemOp_pop_eSP,          iemOp_pop_eBP,          iemOp_pop_eSI,          iemOp_pop_eDI,
    /* 0x60 */  iemOp_pusha,            iemOp_popa,             iemOp_bound_Gv_Ma,      iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */  iemOp_seg_FS,           iemOp_seg_GS,           iemOp_op_size,          iemOp_addr_size,
    /* 0x68 */  iemOp_push_Iz,          iemOp_imul_Gv_Ev_Iz,    iemOp_push_Ib,          iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */  iemOp_insb_Yb_DX,       iemOp_inswd_Yv_DX,      iemOp_outsb_Yb_DX,      iemOp_outswd_Yv_DX,
    /* 0x70 */  iemOp_jo_Jb,            iemOp_jno_Jb,           iemOp_jc_Jb,            iemOp_jnc_Jb,
    /* 0x74 */  iemOp_je_Jb,            iemOp_jne_Jb,           iemOp_jbe_Jb,           iemOp_jnbe_Jb,
    /* 0x78 */  iemOp_js_Jb,            iemOp_jns_Jb,           iemOp_jp_Jb,            iemOp_jnp_Jb,
    /* 0x7c */  iemOp_jl_Jb,            iemOp_jnl_Jb,           iemOp_jle_Jb,           iemOp_jnle_Jb,
    /* 0x80 */  iemOp_Grp1_Eb_Ib_80,    iemOp_Grp1_Ev_Iz,       iemOp_Grp1_Eb_Ib_82,    iemOp_Grp1_Ev_Ib,
    /* 0x84 */  iemOp_test_Eb_Gb,       iemOp_test_Ev_Gv,       iemOp_xchg_Eb_Gb,       iemOp_xchg_Ev_Gv,
    /* 0x88 */  iemOp_mov_Eb_Gb,        iemOp_mov_Ev_Gv,        iemOp_mov_Gb_Eb,        iemOp_mov_Gv_Ev,
    /* 0x8c */  iemOp_mov_Ev_Sw,        iemOp_lea_Gv_M,         iemOp_mov_Sw_Ev,        iemOp_Grp1A,
    /* 0x90 */  iemOp_nop,              iemOp_xchg_eCX_eAX,     iemOp_xchg_eDX_eAX,     iemOp_xchg_eBX_eAX,
    /* 0x94 */  iemOp_xchg_eSP_eAX,     iemOp_xchg_eBP_eAX,     iemOp_xchg_eSI_eAX,     iemOp_xchg_eDI_eAX,
    /* 0x98 */  iemOp_cbw,              iemOp_cwd,              iemOp_call_Ap,          iemOp_wait,
    /* 0x9c */  iemOp_pushf_Fv,         iemOp_popf_Fv,          iemOp_sahf,             iemOp_lahf,
    /* 0xa0 */  iemOp_mov_Al_Ob,        iemOp_mov_rAX_Ov,       iemOp_mov_Ob_AL,        iemOp_mov_Ov_rAX,
    /* 0xa4 */  iemOp_movsb_Xb_Yb,      iemOp_movswd_Xv_Yv,     iemOp_cmpsb_Xb_Yb,      iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */  iemOp_test_AL_Ib,       iemOp_test_eAX_Iz,      iemOp_stosb_Yb_AL,      iemOp_stoswd_Yv_eAX,
    /* 0xac */  iemOp_lodsb_AL_Xb,      iemOp_lodswd_eAX_Xv,    iemOp_scasb_AL_Xb,      iemOp_scaswd_eAX_Xv,
    /* 0xb0 */  iemOp_mov_AL_Ib,        iemOp_CL_Ib,            iemOp_DL_Ib,            iemOp_BL_Ib,
    /* 0xb4 */  iemOp_mov_AH_Ib,        iemOp_CH_Ib,            iemOp_DH_Ib,            iemOp_BH_Ib,
    /* 0xb8 */  iemOp_eAX_Iv,           iemOp_eCX_Iv,           iemOp_eDX_Iv,           iemOp_eBX_Iv,
    /* 0xbc */  iemOp_eSP_Iv,           iemOp_eBP_Iv,           iemOp_eSI_Iv,           iemOp_eDI_Iv,
    /* 0xc0 */  iemOp_Grp2_Eb_Ib,       iemOp_Grp2_Ev_Ib,       iemOp_retn_Iw,          iemOp_retn,
    /* 0xc4 */  iemOp_les_Gv_Mp,        iemOp_lds_Gv_Mp,        iemOp_Grp11_Eb_Ib,      iemOp_Grp11_Ev_Iz,
    /* 0xc8 */  iemOp_enter_Iw_Ib,      iemOp_leave,            iemOp_retf_Iw,          iemOp_retf,
    /* 0xcc */  iemOp_int_3,            iemOp_int_Ib,           iemOp_into,             iemOp_iret,
    /* 0xd0 */  iemOp_Grp2_Eb_1,        iemOp_Grp2_Ev_1,        iemOp_Grp2_Eb_CL,       iemOp_Grp2_Ev_CL,
    /* 0xd4 */  iemOp_aam_Ib,           iemOp_aad_Ib,           iemOp_Invalid,          iemOp_xlat,
    /* 0xd8 */  iemOp_EscF0,            iemOp_EscF1,            iemOp_EscF2,            iemOp_EscF3,
    /* 0xdc */  iemOp_EscF4,            iemOp_EscF5,            iemOp_EscF6,            iemOp_EscF7,
    /* 0xe0 */  iemOp_loopne_Jb,        iemOp_loope_Jb,         iemOp_loop_Jb,          iemOp_jecxz_Jb,
    /* 0xe4 */  iemOp_in_AL_Ib,         iemOp_in_eAX_Ib,        iemOp_out_Ib_AL,        iemOp_out_Ib_eAX,
    /* 0xe8 */  iemOp_call_Jv,          iemOp_jmp_Jv,           iemOp_jmp_Ap,           iemOp_jmp_Jb,
    /* 0xec */  iemOp_in_AL_DX,         iemOp_eAX_DX,           iemOp_out_DX_AL,        iemOp_out_DX_eAX,
    /* 0xf0 */  iemOp_lock,             iemOp_Invalid,          iemOp_repne,            iemOp_repe, /** @todo 0xf1 is INT1 / ICEBP. */
    /* 0xf4 */  iemOp_hlt,              iemOp_cmc,              iemOp_Grp3_Eb,          iemOp_Grp3_Ev,
    /* 0xf8 */  iemOp_clc,              iemOp_stc,              iemOp_cli,              iemOp_sti,
    /* 0xfc */  iemOp_cld,              iemOp_std,              iemOp_Grp4,             iemOp_Grp5,
};
17228
17229
17230/** @} */
17231
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette