VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h@ 44973

Last change on this file since 44973 was 42729, checked in by vboxsync, 12 years ago

IEM: Fixed indexing bug in iemOp_EscF1.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 527.6 KB
Line 
1/* $Id: IEMAllInstructions.cpp.h 42729 2012-08-09 19:31:30Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2012 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24
25/**
26 * Common worker for instructions like ADD, AND, OR, ++ with a byte
27 * memory/register as the destination.
28 *
29 * @param pImpl Pointer to the instruction implementation (assembly).
30 */
31FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_r8, PCIEMOPBINSIZES, pImpl)
32{
33 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
34
35 /*
36 * If rm is denoting a register, no more instruction bytes.
37 */
38 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
39 {
40 IEMOP_HLP_NO_LOCK_PREFIX();
41
42 IEM_MC_BEGIN(3, 0);
43 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
44 IEM_MC_ARG(uint8_t, u8Src, 1);
45 IEM_MC_ARG(uint32_t *, pEFlags, 2);
46
47 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
48 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
49 IEM_MC_REF_EFLAGS(pEFlags);
50 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
51
52 IEM_MC_ADVANCE_RIP();
53 IEM_MC_END();
54 }
55 else
56 {
57 /*
58 * We're accessing memory.
59 * Note! We're putting the eflags on the stack here so we can commit them
60 * after the memory.
61 */
62 uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R; /* CMP,TEST */
63 IEM_MC_BEGIN(3, 2);
64 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
65 IEM_MC_ARG(uint8_t, u8Src, 1);
66 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
67 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
68
69 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
70 IEM_MC_MEM_MAP(pu8Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
71 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
72 IEM_MC_FETCH_EFLAGS(EFlags);
73 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
74 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
75 else
76 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);
77
78 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
79 IEM_MC_COMMIT_EFLAGS(EFlags);
80 IEM_MC_ADVANCE_RIP();
81 IEM_MC_END();
82 }
83 return VINF_SUCCESS;
84}
85
86
87/**
88 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with
89 * memory/register as the destination.
90 *
91 * @param pImpl Pointer to the instruction implementation (assembly).
92 */
93FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_rv, PCIEMOPBINSIZES, pImpl)
94{
95 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
96
97 /*
98 * If rm is denoting a register, no more instruction bytes.
99 */
100 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
101 {
102 IEMOP_HLP_NO_LOCK_PREFIX();
103
104 switch (pIemCpu->enmEffOpSize)
105 {
106 case IEMMODE_16BIT:
107 IEM_MC_BEGIN(3, 0);
108 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
109 IEM_MC_ARG(uint16_t, u16Src, 1);
110 IEM_MC_ARG(uint32_t *, pEFlags, 2);
111
112 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
113 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
114 IEM_MC_REF_EFLAGS(pEFlags);
115 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
116
117 IEM_MC_ADVANCE_RIP();
118 IEM_MC_END();
119 break;
120
121 case IEMMODE_32BIT:
122 IEM_MC_BEGIN(3, 0);
123 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
124 IEM_MC_ARG(uint32_t, u32Src, 1);
125 IEM_MC_ARG(uint32_t *, pEFlags, 2);
126
127 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
128 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
129 IEM_MC_REF_EFLAGS(pEFlags);
130 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
131
132 IEM_MC_ADVANCE_RIP();
133 IEM_MC_END();
134 break;
135
136 case IEMMODE_64BIT:
137 IEM_MC_BEGIN(3, 0);
138 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
139 IEM_MC_ARG(uint64_t, u64Src, 1);
140 IEM_MC_ARG(uint32_t *, pEFlags, 2);
141
142 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
143 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
144 IEM_MC_REF_EFLAGS(pEFlags);
145 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
146
147 IEM_MC_ADVANCE_RIP();
148 IEM_MC_END();
149 break;
150 }
151 }
152 else
153 {
154 /*
155 * We're accessing memory.
156 * Note! We're putting the eflags on the stack here so we can commit them
157 * after the memory.
158 */
159 uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R /* CMP,TEST */;
160 switch (pIemCpu->enmEffOpSize)
161 {
162 case IEMMODE_16BIT:
163 IEM_MC_BEGIN(3, 2);
164 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
165 IEM_MC_ARG(uint16_t, u16Src, 1);
166 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
167 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
168
169 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
170 IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
171 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
172 IEM_MC_FETCH_EFLAGS(EFlags);
173 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
174 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
175 else
176 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
177
178 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
179 IEM_MC_COMMIT_EFLAGS(EFlags);
180 IEM_MC_ADVANCE_RIP();
181 IEM_MC_END();
182 break;
183
184 case IEMMODE_32BIT:
185 IEM_MC_BEGIN(3, 2);
186 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
187 IEM_MC_ARG(uint32_t, u32Src, 1);
188 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
189 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
190
191 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
192 IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
193 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
194 IEM_MC_FETCH_EFLAGS(EFlags);
195 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
196 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
197 else
198 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
199
200 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
201 IEM_MC_COMMIT_EFLAGS(EFlags);
202 IEM_MC_ADVANCE_RIP();
203 IEM_MC_END();
204 break;
205
206 case IEMMODE_64BIT:
207 IEM_MC_BEGIN(3, 2);
208 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
209 IEM_MC_ARG(uint64_t, u64Src, 1);
210 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
211 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
212
213 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
214 IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
215 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
216 IEM_MC_FETCH_EFLAGS(EFlags);
217 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
218 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
219 else
220 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
221
222 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
223 IEM_MC_COMMIT_EFLAGS(EFlags);
224 IEM_MC_ADVANCE_RIP();
225 IEM_MC_END();
226 break;
227 }
228 }
229 return VINF_SUCCESS;
230}
231
232
233/**
234 * Common worker for byte instructions like ADD, AND, OR, ++ with a register as
235 * the destination.
236 *
237 * @param pImpl Pointer to the instruction implementation (assembly).
238 */
239FNIEMOP_DEF_1(iemOpHlpBinaryOperator_r8_rm, PCIEMOPBINSIZES, pImpl)
240{
241 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
242 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
243
244 /*
245 * If rm is denoting a register, no more instruction bytes.
246 */
247 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
248 {
249 IEM_MC_BEGIN(3, 0);
250 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
251 IEM_MC_ARG(uint8_t, u8Src, 1);
252 IEM_MC_ARG(uint32_t *, pEFlags, 2);
253
254 IEM_MC_FETCH_GREG_U8(u8Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
255 IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
256 IEM_MC_REF_EFLAGS(pEFlags);
257 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
258
259 IEM_MC_ADVANCE_RIP();
260 IEM_MC_END();
261 }
262 else
263 {
264 /*
265 * We're accessing memory.
266 */
267 IEM_MC_BEGIN(3, 1);
268 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
269 IEM_MC_ARG(uint8_t, u8Src, 1);
270 IEM_MC_ARG(uint32_t *, pEFlags, 2);
271 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
272
273 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
274 IEM_MC_FETCH_MEM_U8(u8Src, pIemCpu->iEffSeg, GCPtrEffDst);
275 IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
276 IEM_MC_REF_EFLAGS(pEFlags);
277 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
278
279 IEM_MC_ADVANCE_RIP();
280 IEM_MC_END();
281 }
282 return VINF_SUCCESS;
283}
284
285
286/**
287 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with a
288 * register as the destination.
289 *
290 * @param pImpl Pointer to the instruction implementation (assembly).
291 */
292FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
293{
294 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
295 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
296
297 /*
298 * If rm is denoting a register, no more instruction bytes.
299 */
300 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
301 {
302 switch (pIemCpu->enmEffOpSize)
303 {
304 case IEMMODE_16BIT:
305 IEM_MC_BEGIN(3, 0);
306 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
307 IEM_MC_ARG(uint16_t, u16Src, 1);
308 IEM_MC_ARG(uint32_t *, pEFlags, 2);
309
310 IEM_MC_FETCH_GREG_U16(u16Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
311 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
312 IEM_MC_REF_EFLAGS(pEFlags);
313 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
314
315 IEM_MC_ADVANCE_RIP();
316 IEM_MC_END();
317 break;
318
319 case IEMMODE_32BIT:
320 IEM_MC_BEGIN(3, 0);
321 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
322 IEM_MC_ARG(uint32_t, u32Src, 1);
323 IEM_MC_ARG(uint32_t *, pEFlags, 2);
324
325 IEM_MC_FETCH_GREG_U32(u32Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
326 IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
327 IEM_MC_REF_EFLAGS(pEFlags);
328 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
329
330 IEM_MC_ADVANCE_RIP();
331 IEM_MC_END();
332 break;
333
334 case IEMMODE_64BIT:
335 IEM_MC_BEGIN(3, 0);
336 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
337 IEM_MC_ARG(uint64_t, u64Src, 1);
338 IEM_MC_ARG(uint32_t *, pEFlags, 2);
339
340 IEM_MC_FETCH_GREG_U64(u64Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
341 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
342 IEM_MC_REF_EFLAGS(pEFlags);
343 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
344
345 IEM_MC_ADVANCE_RIP();
346 IEM_MC_END();
347 break;
348 }
349 }
350 else
351 {
352 /*
353 * We're accessing memory.
354 */
355 switch (pIemCpu->enmEffOpSize)
356 {
357 case IEMMODE_16BIT:
358 IEM_MC_BEGIN(3, 1);
359 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
360 IEM_MC_ARG(uint16_t, u16Src, 1);
361 IEM_MC_ARG(uint32_t *, pEFlags, 2);
362 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
363
364 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
365 IEM_MC_FETCH_MEM_U16(u16Src, pIemCpu->iEffSeg, GCPtrEffDst);
366 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
367 IEM_MC_REF_EFLAGS(pEFlags);
368 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
369
370 IEM_MC_ADVANCE_RIP();
371 IEM_MC_END();
372 break;
373
374 case IEMMODE_32BIT:
375 IEM_MC_BEGIN(3, 1);
376 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
377 IEM_MC_ARG(uint32_t, u32Src, 1);
378 IEM_MC_ARG(uint32_t *, pEFlags, 2);
379 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
380
381 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
382 IEM_MC_FETCH_MEM_U32(u32Src, pIemCpu->iEffSeg, GCPtrEffDst);
383 IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
384 IEM_MC_REF_EFLAGS(pEFlags);
385 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
386
387 IEM_MC_ADVANCE_RIP();
388 IEM_MC_END();
389 break;
390
391 case IEMMODE_64BIT:
392 IEM_MC_BEGIN(3, 1);
393 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
394 IEM_MC_ARG(uint64_t, u64Src, 1);
395 IEM_MC_ARG(uint32_t *, pEFlags, 2);
396 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
397
398 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
399 IEM_MC_FETCH_MEM_U64(u64Src, pIemCpu->iEffSeg, GCPtrEffDst);
400 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
401 IEM_MC_REF_EFLAGS(pEFlags);
402 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
403
404 IEM_MC_ADVANCE_RIP();
405 IEM_MC_END();
406 break;
407 }
408 }
409 return VINF_SUCCESS;
410}
411
412
413/**
414 * Common worker for instructions like ADD, AND, OR, ++ with working on AL with
415 * a byte immediate.
416 *
417 * @param pImpl Pointer to the instruction implementation (assembly).
418 */
419FNIEMOP_DEF_1(iemOpHlpBinaryOperator_AL_Ib, PCIEMOPBINSIZES, pImpl)
420{
421 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
422 IEMOP_HLP_NO_LOCK_PREFIX();
423
424 IEM_MC_BEGIN(3, 0);
425 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
426 IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/ u8Imm, 1);
427 IEM_MC_ARG(uint32_t *, pEFlags, 2);
428
429 IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX);
430 IEM_MC_REF_EFLAGS(pEFlags);
431 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
432
433 IEM_MC_ADVANCE_RIP();
434 IEM_MC_END();
435 return VINF_SUCCESS;
436}
437
438
439/**
440 * Common worker for instructions like ADD, AND, OR, ++ with working on
441 * AX/EAX/RAX with a word/dword immediate.
442 *
443 * @param pImpl Pointer to the instruction implementation (assembly).
444 */
445FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rAX_Iz, PCIEMOPBINSIZES, pImpl)
446{
447 switch (pIemCpu->enmEffOpSize)
448 {
449 case IEMMODE_16BIT:
450 {
451 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
452 IEMOP_HLP_NO_LOCK_PREFIX();
453
454 IEM_MC_BEGIN(3, 0);
455 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
456 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1);
457 IEM_MC_ARG(uint32_t *, pEFlags, 2);
458
459 IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX);
460 IEM_MC_REF_EFLAGS(pEFlags);
461 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
462
463 IEM_MC_ADVANCE_RIP();
464 IEM_MC_END();
465 return VINF_SUCCESS;
466 }
467
468 case IEMMODE_32BIT:
469 {
470 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
471 IEMOP_HLP_NO_LOCK_PREFIX();
472
473 IEM_MC_BEGIN(3, 0);
474 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
475 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1);
476 IEM_MC_ARG(uint32_t *, pEFlags, 2);
477
478 IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX);
479 IEM_MC_REF_EFLAGS(pEFlags);
480 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
481
482 IEM_MC_ADVANCE_RIP();
483 IEM_MC_END();
484 return VINF_SUCCESS;
485 }
486
487 case IEMMODE_64BIT:
488 {
489 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
490 IEMOP_HLP_NO_LOCK_PREFIX();
491
492 IEM_MC_BEGIN(3, 0);
493 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
494 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1);
495 IEM_MC_ARG(uint32_t *, pEFlags, 2);
496
497 IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX);
498 IEM_MC_REF_EFLAGS(pEFlags);
499 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
500
501 IEM_MC_ADVANCE_RIP();
502 IEM_MC_END();
503 return VINF_SUCCESS;
504 }
505
506 IEM_NOT_REACHED_DEFAULT_CASE_RET();
507 }
508}
509
510
/** Opcodes 0xf1, 0xd6.
 * Shared handler for permanently invalid one-byte opcodes; raises \#UD. */
FNIEMOP_DEF(iemOp_Invalid)
{
    IEMOP_MNEMONIC("Invalid");
    return IEMOP_RAISE_INVALID_OPCODE();
}
517
518
519
520/** @name ..... opcodes.
521 *
522 * @{
523 */
524
525/** @} */
526
527
528/** @name Two byte opcodes (first byte 0x0f).
529 *
530 * @{
531 */
532
/** Opcode 0x0f 0x00 /0.
 * SLDT - store the LDTR selector to a register (16/32/64-bit, zero extended)
 * or to a 16-bit memory location.  Invalid in real and V8086 mode. */
FNIEMOP_DEF_1(iemOp_Grp6_sldt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sldt Rv/Mw");
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination is always a 16-bit store, regardless of operand size. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
588
589
/** Opcode 0x0f 0x00 /1.
 * STR - store the TR selector to a register (16/32/64-bit) or a 16-bit
 * memory location.  Mirrors iemOp_Grp6_sldt for the task register. */
FNIEMOP_DEF_1(iemOp_Grp6_str, uint8_t, bRm)
{
    IEMOP_MNEMONIC("str Rv/Mw");
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination is always a 16-bit store, regardless of operand size. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
645
646
/** Opcode 0x0f 0x00 /2.
 * LLDT - load the LDTR from a 16-bit selector in a register or memory.
 * Privilege and descriptor checks are performed by iemCImpl_lldt. */
FNIEMOP_DEF_1(iemOp_Grp6_lldt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("lldt Ew");
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        /* CPL check before touching memory so #GP(0) takes precedence over faults. */
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
676
677
/** Opcode 0x0f 0x00 /3.
 * LTR - load the task register from a 16-bit selector in a register or
 * memory.  Privilege and descriptor checks are performed by iemCImpl_ltr. */
FNIEMOP_DEF_1(iemOp_Grp6_ltr, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ltr Ew");
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        /* CPL check before touching memory so #GP(0) takes precedence over faults. */
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
707
708
/** Opcode 0x0f 0x00 /4.  VERR - not implemented yet (stub). */
FNIEMOP_STUB_1(iemOp_Grp6_verr, uint8_t, bRm);


/** Opcode 0x0f 0x00 /5.  VERW - not implemented yet (stub). */
FNIEMOP_STUB_1(iemOp_Grp6_verw, uint8_t, bRm);
715
716
717/** Opcode 0x0f 0x00. */
718FNIEMOP_DEF(iemOp_Grp6)
719{
720 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
721 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
722 {
723 case 0: return FNIEMOP_CALL_1(iemOp_Grp6_sldt, bRm);
724 case 1: return FNIEMOP_CALL_1(iemOp_Grp6_str, bRm);
725 case 2: return FNIEMOP_CALL_1(iemOp_Grp6_lldt, bRm);
726 case 3: return FNIEMOP_CALL_1(iemOp_Grp6_ltr, bRm);
727 case 4: return FNIEMOP_CALL_1(iemOp_Grp6_verr, bRm);
728 case 5: return FNIEMOP_CALL_1(iemOp_Grp6_verw, bRm);
729 case 6: return IEMOP_RAISE_INVALID_OPCODE();
730 case 7: return IEMOP_RAISE_INVALID_OPCODE();
731 IEM_NOT_REACHED_DEFAULT_CASE_RET();
732 }
733
734}
735
736
/** Opcode 0x0f 0x01 /0.
 * SGDT - store the GDTR to memory; real work done by iemCImpl_sgdt.
 * NOTE(review): the effective address local is named GCPtrEffSrc although it
 * is the store destination - naming only, behavior matches iemCImpl_sgdt. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sgdt Ms");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/pIemCpu->iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CALL_CIMPL_3(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
752
753
/** Opcode 0x0f 0x01 /0 (mod=3, rm=1).
 * VMCALL - VT-x not emulated; raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0 (mod=3, rm=2).
 * VMLAUNCH - VT-x not emulated; raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0 (mod=3, rm=3).
 * VMRESUME - VT-x not emulated; raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /0 (mod=3, rm=4).
 * VMXOFF - VT-x not emulated; raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
784
785
/** Opcode 0x0f 0x01 /1.
 * SIDT - store the IDTR to memory; real work done by iemCImpl_sidt.
 * Mirrors iemOp_Grp7_sgdt. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sidt Ms");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/pIemCpu->iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CALL_CIMPL_3(iemCImpl_sidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
801
802
/** Opcode 0x0f 0x01 /1 (mod=3, rm=0).
 * MONITOR - not implemented yet; returns VERR_IEM_INSTR_NOT_IMPLEMENTED. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    NOREF(pIemCpu);
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}


/** Opcode 0x0f 0x01 /1 (mod=3, rm=1).
 * MWAIT - not implemented yet; returns VERR_IEM_INSTR_NOT_IMPLEMENTED. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    NOREF(pIemCpu);
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
819
820
821/** Opcode 0x0f 0x01 /2. */
822FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
823{
824 IEMOP_HLP_NO_LOCK_PREFIX();
825
826 IEMOP_HLP_64BIT_OP_SIZE();
827 IEM_MC_BEGIN(3, 1);
828 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/pIemCpu->iEffSeg, 0);
829 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
830 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pIemCpu->enmEffOpSize, 2);
831 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
832 IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
833 IEM_MC_END();
834 return VINF_SUCCESS;
835}
836
837
/** Opcode 0x0f 0x01 /2 (mod=3, rm=0).
 * XGETBV - not implemented; asserts in debug builds, raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    AssertFailed();
    return IEMOP_RAISE_INVALID_OPCODE();
}


/** Opcode 0x0f 0x01 /2 (mod=3, rm=1).
 * XSETBV - not implemented; asserts in debug builds, raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    AssertFailed();
    return IEMOP_RAISE_INVALID_OPCODE();
}
852
853
854/** Opcode 0x0f 0x01 /3. */
855FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
856{
857 IEMOP_HLP_NO_LOCK_PREFIX();
858
859 IEMMODE enmEffOpSize = pIemCpu->enmCpuMode == IEMMODE_64BIT
860 ? IEMMODE_64BIT
861 : pIemCpu->enmEffOpSize;
862 IEM_MC_BEGIN(3, 1);
863 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/pIemCpu->iEffSeg, 0);
864 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
865 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
866 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
867 IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
868 IEM_MC_END();
869 return VINF_SUCCESS;
870}
871
872
/* AMD SVM instructions (0x0f 0x01 with mod=3, reg=3) - all unimplemented,
   decode as undefined opcode (#UD) stubs. */

/** Opcode 0x0f 0x01 0xd8.  VMRUN. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9.  VMMCALL. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda.  VMLOAD. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb.  VMSAVE. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc.  STGI. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd.  CLGI. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xde.  SKINIT. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);

/** Opcode 0x0f 0x01 0xdf.  INVLPGA. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
896
/** Opcode 0x0f 0x01 /4.
 * SMSW - store the machine status word (low part of CR0) to a register
 * (operand-sized) or a 16-bit memory location. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_CR0_U16(u16Tmp);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_CR0_U32(u32Tmp);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_CR0_U64(u64Tmp);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Ignore operand size here, memory refs are always 16-bit. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tmp);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_FETCH_CR0_U16(u16Tmp);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
}
949
950
/** Opcode 0x0f 0x01 /6.
 * LMSW - load the machine status word from a 16-bit register or memory
 * operand; the heavy lifting (CR0 update rules) is in iemCImpl_lmsw. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3-bits are used. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
977
978
/** Opcode 0x0f 0x01 /7.
 * INVLPG - invalidate the TLB entry for the effective address;
 * real work done by iemCImpl_invlpg. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
990
991
/** Opcode 0x0f 0x01 /7 (mod=3, rm=0).
 * SWAPGS - not implemented yet; returns VERR_IEM_INSTR_NOT_IMPLEMENTED. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    NOREF(pIemCpu);
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}


/** Opcode 0x0f 0x01 /7 (mod=3, rm=1).
 * RDTSCP - not implemented yet; returns VERR_IEM_INSTR_NOT_IMPLEMENTED. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    NOREF(pIemCpu);
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
1008
1009
/** Opcode 0x0f 0x01.
 * Group 7 dispatcher.  For /0-/3 and /7 the memory forms go to the
 * descriptor-table / invlpg handlers, while the register forms (mod=3)
 * select special instructions via the rm field. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            /* Memory form: SGDT.  Register forms: VT-x instructions. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            /* Memory form: SIDT.  Register forms: MONITOR/MWAIT. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            /* Memory form: LGDT.  Register forms: XGETBV/XSETBV. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            /* Memory form: LIDT.  Register forms: AMD SVM instructions. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            /* Memory form: INVLPG.  Register forms: SWAPGS/RDTSCP. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_invlpg, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1086
1087
1088/** Opcode 0x0f 0x02. */
1089FNIEMOP_STUB(iemOp_lar_Gv_Ew);
1090/** Opcode 0x0f 0x03. */
1091FNIEMOP_STUB(iemOp_lsl_Gv_Ew);
1092/** Opcode 0x0f 0x04. */
1093FNIEMOP_STUB(iemOp_syscall);
1094
1095
/** Opcode 0x0f 0x06: CLTS - clear the task-switched flag (CR0.TS). */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC("clts");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* No further operands; the real work (CPL check, CR0 update) is in the C implementation. */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}
1103
1104
1105/** Opcode 0x0f 0x06. */
1106FNIEMOP_STUB(iemOp_sysret);
1107/** Opcode 0x0f 0x08. */
1108FNIEMOP_STUB(iemOp_invd);
1109
1110
/**
 * Opcode 0x0f 0x09: WBINVD.
 *
 * Only the CPL-0 privilege check is emulated; the actual cache write-back and
 * invalidation is intentionally treated as a no-op (we emulate no caches).
 */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC("wbinvd");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}
1122
1123
1124/** Opcode 0x0f 0x0b. */
1125FNIEMOP_STUB(iemOp_ud2);
1126
1127/** Opcode 0x0f 0x0d. */
1128FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
1129{
1130 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
1131 if (!IEM_IS_AMD_CPUID_FEATURES_ANY_PRESENT(X86_CPUID_EXT_FEATURE_EDX_LONG_MODE | X86_CPUID_AMD_FEATURE_EDX_3DNOW,
1132 X86_CPUID_AMD_FEATURE_ECX_3DNOWPRF))
1133 {
1134 IEMOP_MNEMONIC("GrpP");
1135 return IEMOP_RAISE_INVALID_OPCODE();
1136 }
1137
1138 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1139 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1140 {
1141 IEMOP_MNEMONIC("GrpP");
1142 return IEMOP_RAISE_INVALID_OPCODE();
1143 }
1144
1145 IEMOP_HLP_NO_LOCK_PREFIX();
1146 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
1147 {
1148 case 2: /* Aliased to /0 for the time being. */
1149 case 4: /* Aliased to /0 for the time being. */
1150 case 5: /* Aliased to /0 for the time being. */
1151 case 6: /* Aliased to /0 for the time being. */
1152 case 7: /* Aliased to /0 for the time being. */
1153 case 0: IEMOP_MNEMONIC("prefetch"); break;
1154 case 1: IEMOP_MNEMONIC("prefetchw "); break;
1155 case 3: IEMOP_MNEMONIC("prefetchw"); break;
1156 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1157 }
1158
1159 IEM_MC_BEGIN(0, 1);
1160 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1161 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
1162 /* Currently a NOP. */
1163 IEM_MC_ADVANCE_RIP();
1164 IEM_MC_END();
1165 return VINF_SUCCESS;
1166}
1167
1168
1169/** Opcode 0x0f 0x0e. */
1170FNIEMOP_STUB(iemOp_femms);
1171
1172
1173/** Opcode 0x0f 0x0f 0x0c. */
1174FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);
1175
1176/** Opcode 0x0f 0x0f 0x0d. */
1177FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);
1178
1179/** Opcode 0x0f 0x0f 0x1c. */
1180FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);
1181
1182/** Opcode 0x0f 0x0f 0x1d. */
1183FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);
1184
1185/** Opcode 0x0f 0x0f 0x8a. */
1186FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);
1187
1188/** Opcode 0x0f 0x0f 0x8e. */
1189FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);
1190
1191/** Opcode 0x0f 0x0f 0x90. */
1192FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);
1193
1194/** Opcode 0x0f 0x0f 0x94. */
1195FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);
1196
1197/** Opcode 0x0f 0x0f 0x96. */
1198FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);
1199
1200/** Opcode 0x0f 0x0f 0x97. */
1201FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);
1202
1203/** Opcode 0x0f 0x0f 0x9a. */
1204FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);
1205
1206/** Opcode 0x0f 0x0f 0x9e. */
1207FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);
1208
1209/** Opcode 0x0f 0x0f 0xa0. */
1210FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);
1211
1212/** Opcode 0x0f 0x0f 0xa4. */
1213FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);
1214
1215/** Opcode 0x0f 0x0f 0xa6. */
1216FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);
1217
1218/** Opcode 0x0f 0x0f 0xa7. */
1219FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);
1220
1221/** Opcode 0x0f 0x0f 0xaa. */
1222FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);
1223
1224/** Opcode 0x0f 0x0f 0xae. */
1225FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);
1226
1227/** Opcode 0x0f 0x0f 0xb0. */
1228FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);
1229
1230/** Opcode 0x0f 0x0f 0xb4. */
1231FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);
1232
1233/** Opcode 0x0f 0x0f 0xb6. */
1234FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);
1235
1236/** Opcode 0x0f 0x0f 0xb7. */
1237FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);
1238
1239/** Opcode 0x0f 0x0f 0xbb. */
1240FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);
1241
1242/** Opcode 0x0f 0x0f 0xbf. */
1243FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);
1244
1245
/**
 * Opcode 0x0f 0x0f: AMD 3DNow! escape.
 *
 * Raises \#UD when the CPU profile doesn't advertise 3DNow!.  Dispatches on the
 * 3DNow! function code byte to per-instruction workers (all stubs so far).
 *
 * NOTE(review): architecturally the 3DNow! function code byte follows the
 * ModRM/SIB/displacement bytes; this fetches the byte immediately after
 * 0x0f 0x0f - confirm the ordering when the workers get implemented.
 */
FNIEMOP_DEF(iemOp_3Dnow)
{
    if (!IEM_IS_AMD_CPUID_FEATURE_PRESENT_EDX(X86_CPUID_AMD_FEATURE_EDX_3DNOW))
    {
        IEMOP_MNEMONIC("3Dnow");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* This is pretty sparse, use switch instead of table. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    switch (b)
    {
        case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
        case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
        case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
        case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
        case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
        case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
        case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
        case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
        case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
        case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
        case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
        case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
        case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
        case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
        case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
        case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
        case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
        case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
        case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
        case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
        case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
        case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
        case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
        case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
1287
1288
1289/** Opcode 0x0f 0x10. */
1290FNIEMOP_STUB(iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd);
1291/** Opcode 0x0f 0x11. */
1292FNIEMOP_STUB(iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd);
1293/** Opcode 0x0f 0x12. */
1294FNIEMOP_STUB(iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq);
1295/** Opcode 0x0f 0x13. */
1296FNIEMOP_STUB(iemOp_movlps_Mq_Vq__movlpd_Mq_Vq);
1297/** Opcode 0x0f 0x14. */
1298FNIEMOP_STUB(iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq);
1299/** Opcode 0x0f 0x15. */
1300FNIEMOP_STUB(iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq);
1301/** Opcode 0x0f 0x16. */
1302FNIEMOP_STUB(iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq);
1303/** Opcode 0x0f 0x17. */
1304FNIEMOP_STUB(iemOp_movhps_Mq_Vq__movhpd_Mq_Vq);
1305
1306
/**
 * Opcode 0x0f 0x18: group 16 - PREFETCHNTA/T0/T1/T2 (memory form only).
 *
 * The effective address is decoded (and may fault) but no actual prefetching
 * is performed.  The register form raises \#UD.
 */
FNIEMOP_DEF(iemOp_prefetch_Grp16)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 4: /* Aliased to /0 for the time being according to AMD. */
            case 5: /* Aliased to /0 for the time being according to AMD. */
            case 6: /* Aliased to /0 for the time being according to AMD. */
            case 7: /* Aliased to /0 for the time being according to AMD. */
            case 0: IEMOP_MNEMONIC("prefetchNTA m8"); break;
            case 1: IEMOP_MNEMONIC("prefetchT0  m8"); break;
            case 2: IEMOP_MNEMONIC("prefetchT1  m8"); break;
            case 3: IEMOP_MNEMONIC("prefetchT2  m8"); break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }

        /* Decode the effective address, then do nothing. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
        /* Currently a NOP. */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }

    return IEMOP_RAISE_INVALID_OPCODE();
}
1338
1339
/**
 * Opcode 0x0f 0x19..0x1f: multi-byte NOP Ev (hint NOP).
 *
 * For the memory form the effective address is still decoded so addressing
 * faults behave like on real hardware; nothing else happens.
 */
FNIEMOP_DEF(iemOp_nop_Ev)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(0, 0);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
        /* Currently a NOP. */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1362
1363
/**
 * Opcode 0x0f 0x20: MOV Rd,Cd - read a control register into a GPR.
 *
 * The mod field of the ModRM byte is ignored (the operand is always a
 * register), as are operand-size overrides: the operand size is forced to
 * 64-bit in long mode and 32-bit otherwise.  A LOCK prefix encodes access to
 * CR8 on CPUs with the AMD CR8L feature; elsewhere it raises \#UD.
 */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC("mov Rd,Cd");
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_64BIT;
    else
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_IS_AMD_CPUID_FEATURE_PRESENT_ECX(X86_CPUID_AMD_FEATURE_ECX_CR8L))
            return IEMOP_RAISE_INVALID_LOCK_PREFIX(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2, CR3, CR4 and CR8 exist; anything else is #UD. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB, iCrReg);
}
1394
1395
1396/** Opcode 0x0f 0x21. */
1397FNIEMOP_DEF(iemOp_mov_Rd_Dd)
1398{
1399 IEMOP_MNEMONIC("mov Rd,Dd");
1400 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1401 IEMOP_HLP_NO_LOCK_PREFIX();
1402 if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_R)
1403 return IEMOP_RAISE_INVALID_OPCODE();
1404 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
1405 (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB,
1406 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
1407}
1408
1409
/**
 * Opcode 0x0f 0x22: MOV Cd,Rd - write a GPR into a control register.
 *
 * Mirrors iemOp_mov_Rd_Cd: mod and operand-size overrides are ignored, and a
 * LOCK prefix encodes CR8 on CPUs with the AMD CR8L feature.
 */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC("mov Cd,Rd");
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_64BIT;
    else
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_IS_AMD_CPUID_FEATURE_PRESENT_ECX(X86_CPUID_AMD_FEATURE_ECX_CR8L))
            return IEMOP_RAISE_INVALID_LOCK_PREFIX(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2, CR3, CR4 and CR8 exist; anything else is #UD. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB);
}
1440
1441
/**
 * Opcode 0x0f 0x23: MOV Dd,Rd - write a GPR into a debug register.
 *
 * REX.R is invalid here (there are no DR8..DR15), raising \#UD.
 */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    IEMOP_MNEMONIC("mov Dd,Rd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
                                   (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB);
}
1454
1455
/** Opcode 0x0f 0x24: MOV Rd,Td - legacy test-register move, always \#UD on modern CPUs. */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    IEMOP_MNEMONIC("mov Rd,Td");
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1463
1464
/** Opcode 0x0f 0x26: MOV Td,Rd - legacy test-register move, always \#UD on modern CPUs. */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    IEMOP_MNEMONIC("mov Td,Rd");
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1472
1473
1474/** Opcode 0x0f 0x28. */
1475FNIEMOP_STUB(iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd);
1476/** Opcode 0x0f 0x29. */
1477FNIEMOP_STUB(iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd);
1478/** Opcode 0x0f 0x2a. */
1479FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey);
1480/** Opcode 0x0f 0x2b. */
1481FNIEMOP_STUB(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd);
1482/** Opcode 0x0f 0x2c. */
1483FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd);
1484/** Opcode 0x0f 0x2d. */
1485FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd);
1486/** Opcode 0x0f 0x2e. */
1487FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd);
1488/** Opcode 0x0f 0x2f. */
1489FNIEMOP_STUB(iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd);
1490
1491
/** Opcode 0x0f 0x30: WRMSR - deferred to the C implementation. */
FNIEMOP_DEF(iemOp_wrmsr)
{
    IEMOP_MNEMONIC("wrmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
}
1499
1500
/** Opcode 0x0f 0x31: RDTSC - deferred to the C implementation. */
FNIEMOP_DEF(iemOp_rdtsc)
{
    IEMOP_MNEMONIC("rdtsc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
}
1508
1509
/** Opcode 0x0f 0x32: RDMSR - deferred to the C implementation.  (Comment fixed: previously mislabelled 0x0f 0x33.) */
FNIEMOP_DEF(iemOp_rdmsr)
{
    IEMOP_MNEMONIC("rdmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
}
1517
1518
/** Opcode 0x0f 0x33. */
1520FNIEMOP_STUB(iemOp_rdpmc);
1521/** Opcode 0x0f 0x34. */
1522FNIEMOP_STUB(iemOp_sysenter);
1523/** Opcode 0x0f 0x35. */
1524FNIEMOP_STUB(iemOp_sysexit);
1525/** Opcode 0x0f 0x37. */
1526FNIEMOP_STUB(iemOp_getsec);
1527/** Opcode 0x0f 0x38. */
1528FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
1529/** Opcode 0x0f 0x3a. */
1530FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
1531/** Opcode 0x0f 0x3c (?). */
1532FNIEMOP_STUB(iemOp_movnti_Gv_Ev);
1533
1534/**
1535 * Implements a conditional move.
1536 *
1537 * Wish there was an obvious way to do this where we could share and reduce
1538 * code bloat.
1539 *
1540 * @param a_Cnd The conditional "microcode" operation.
1541 */
1542#define CMOV_X(a_Cnd) \
1543 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
1544 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
1545 { \
1546 switch (pIemCpu->enmEffOpSize) \
1547 { \
1548 case IEMMODE_16BIT: \
1549 IEM_MC_BEGIN(0, 1); \
1550 IEM_MC_LOCAL(uint16_t, u16Tmp); \
1551 a_Cnd { \
1552 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
1553 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp); \
1554 } IEM_MC_ENDIF(); \
1555 IEM_MC_ADVANCE_RIP(); \
1556 IEM_MC_END(); \
1557 return VINF_SUCCESS; \
1558 \
1559 case IEMMODE_32BIT: \
1560 IEM_MC_BEGIN(0, 1); \
1561 IEM_MC_LOCAL(uint32_t, u32Tmp); \
1562 a_Cnd { \
1563 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
1564 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp); \
1565 } IEM_MC_ELSE() { \
1566 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); \
1567 } IEM_MC_ENDIF(); \
1568 IEM_MC_ADVANCE_RIP(); \
1569 IEM_MC_END(); \
1570 return VINF_SUCCESS; \
1571 \
1572 case IEMMODE_64BIT: \
1573 IEM_MC_BEGIN(0, 1); \
1574 IEM_MC_LOCAL(uint64_t, u64Tmp); \
1575 a_Cnd { \
1576 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
1577 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp); \
1578 } IEM_MC_ENDIF(); \
1579 IEM_MC_ADVANCE_RIP(); \
1580 IEM_MC_END(); \
1581 return VINF_SUCCESS; \
1582 \
1583 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
1584 } \
1585 } \
1586 else \
1587 { \
1588 switch (pIemCpu->enmEffOpSize) \
1589 { \
1590 case IEMMODE_16BIT: \
1591 IEM_MC_BEGIN(0, 2); \
1592 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1593 IEM_MC_LOCAL(uint16_t, u16Tmp); \
1594 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm); \
1595 IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
1596 a_Cnd { \
1597 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp); \
1598 } IEM_MC_ENDIF(); \
1599 IEM_MC_ADVANCE_RIP(); \
1600 IEM_MC_END(); \
1601 return VINF_SUCCESS; \
1602 \
1603 case IEMMODE_32BIT: \
1604 IEM_MC_BEGIN(0, 2); \
1605 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1606 IEM_MC_LOCAL(uint32_t, u32Tmp); \
1607 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm); \
1608 IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
1609 a_Cnd { \
1610 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp); \
1611 } IEM_MC_ELSE() { \
1612 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); \
1613 } IEM_MC_ENDIF(); \
1614 IEM_MC_ADVANCE_RIP(); \
1615 IEM_MC_END(); \
1616 return VINF_SUCCESS; \
1617 \
1618 case IEMMODE_64BIT: \
1619 IEM_MC_BEGIN(0, 2); \
1620 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1621 IEM_MC_LOCAL(uint64_t, u64Tmp); \
1622 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm); \
1623 IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
1624 a_Cnd { \
1625 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp); \
1626 } IEM_MC_ENDIF(); \
1627 IEM_MC_ADVANCE_RIP(); \
1628 IEM_MC_END(); \
1629 return VINF_SUCCESS; \
1630 \
1631 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
1632 } \
1633 } do {} while (0)
1634
1635
1636
/** Opcode 0x0f 0x40: CMOVO - move if OF=1. */
FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovo Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x41: CMOVNO - move if OF=0. */
FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovno Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x42: CMOVC/CMOVB - move if CF=1. */
FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x43: CMOVNC/CMOVAE - move if CF=0. */
FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x44: CMOVE/CMOVZ - move if ZF=1. */
FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
{
    IEMOP_MNEMONIC("cmove Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x45: CMOVNE/CMOVNZ - move if ZF=0. */
FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovne Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x46: CMOVBE - move if CF=1 or ZF=1. */
FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x47: CMOVNBE/CMOVA - move if CF=0 and ZF=0. */
FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x48: CMOVS - move if SF=1. */
FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovs Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x49: CMOVNS - move if SF=0. */
FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovns Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x4a: CMOVP - move if PF=1. */
FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4b: CMOVNP - move if PF=0. */
FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4c: CMOVL - move if SF != OF. */
FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4d: CMOVNL/CMOVGE - move if SF == OF. */
FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4e: CMOVLE - move if ZF=1 or SF != OF. */
FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4f: CMOVNLE/CMOVG - move if ZF=0 and SF == OF. */
FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}
1763
1764#undef CMOV_X
1765
1766/** Opcode 0x0f 0x50. */
1767FNIEMOP_STUB(iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd);
1768/** Opcode 0x0f 0x51. */
1769FNIEMOP_STUB(iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd);
1770/** Opcode 0x0f 0x52. */
1771FNIEMOP_STUB(iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss);
1772/** Opcode 0x0f 0x53. */
1773FNIEMOP_STUB(iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss);
1774/** Opcode 0x0f 0x54. */
1775FNIEMOP_STUB(iemOp_andps_Vps_Wps__andpd_Wpd_Vpd);
1776/** Opcode 0x0f 0x55. */
1777FNIEMOP_STUB(iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd);
1778/** Opcode 0x0f 0x56. */
1779FNIEMOP_STUB(iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd);
1780/** Opcode 0x0f 0x57. */
1781FNIEMOP_STUB(iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd);
1782/** Opcode 0x0f 0x58. */
1783FNIEMOP_STUB(iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd);
1784/** Opcode 0x0f 0x59. */
1785FNIEMOP_STUB(iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd);
1786/** Opcode 0x0f 0x5a. */
1787FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd);
1788/** Opcode 0x0f 0x5b. */
1789FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps);
1790/** Opcode 0x0f 0x5c. */
1791FNIEMOP_STUB(iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd);
1792/** Opcode 0x0f 0x5d. */
1793FNIEMOP_STUB(iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd);
1794/** Opcode 0x0f 0x5e. */
1795FNIEMOP_STUB(iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd);
1796/** Opcode 0x0f 0x5f. */
1797FNIEMOP_STUB(iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd);
1798/** Opcode 0x0f 0x60. */
1799FNIEMOP_STUB(iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq);
1800/** Opcode 0x0f 0x61. */
1801FNIEMOP_STUB(iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq);
1802/** Opcode 0x0f 0x62. */
1803FNIEMOP_STUB(iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq);
1804/** Opcode 0x0f 0x63. */
1805FNIEMOP_STUB(iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq);
1806/** Opcode 0x0f 0x64. */
1807FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq);
1808/** Opcode 0x0f 0x65. */
1809FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq);
1810/** Opcode 0x0f 0x66. */
1811FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq);
1812/** Opcode 0x0f 0x67. */
1813FNIEMOP_STUB(iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq);
1814/** Opcode 0x0f 0x68. */
1815FNIEMOP_STUB(iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq);
1816/** Opcode 0x0f 0x69. */
1817FNIEMOP_STUB(iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq);
1818/** Opcode 0x0f 0x6a. */
1819FNIEMOP_STUB(iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq);
1820/** Opcode 0x0f 0x6b. */
1821FNIEMOP_STUB(iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq);
1822/** Opcode 0x0f 0x6c. */
1823FNIEMOP_STUB(iemOp_punpcklqdq_Vdq_Wdq);
1824/** Opcode 0x0f 0x6d. */
1825FNIEMOP_STUB(iemOp_punpckhqdq_Vdq_Wdq);
1826/** Opcode 0x0f 0x6e. */
1827FNIEMOP_STUB(iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey);
1828/** Opcode 0x0f 0x6f. */
1829FNIEMOP_STUB(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq);
1830/** Opcode 0x0f 0x70. */
1831FNIEMOP_STUB(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib);
1832
1833/** Opcode 0x0f 0x71 11/2. */
1834FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);
1835
1836/** Opcode 0x66 0x0f 0x71 11/2. */
1837FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Udq_Ib, uint8_t, bRm);
1838
1839/** Opcode 0x0f 0x71 11/4. */
1840FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);
1841
1842/** Opcode 0x66 0x0f 0x71 11/4. */
1843FNIEMOP_STUB_1(iemOp_Grp12_psraw_Udq_Ib, uint8_t, bRm);
1844
1845/** Opcode 0x0f 0x71 11/6. */
1846FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);
1847
1848/** Opcode 0x66 0x0f 0x71 11/6. */
1849FNIEMOP_STUB_1(iemOp_Grp12_psllw_Udq_Ib, uint8_t, bRm);
1850
1851
/**
 * Opcode 0x0f 0x71: group 12 - MMX/SSE shift-by-immediate on words.
 *
 * Register operand only (memory form is \#UD).  The /r field selects
 * psrlw (/2), psraw (/4) or psllw (/6); the 0x66 prefix selects the SSE
 * (XMM) form, no prefix the MMX form, and REPZ/REPNZ are invalid.
 */
FNIEMOP_DEF(iemOp_Grp12)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* register form only */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 3: case 5: case 7:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2: /* psrlw */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0:                     return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP:    return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Udq_Ib, bRm);
                default:                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 4: /* psraw */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0:                     return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP:    return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Udq_Ib, bRm);
                default:                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 6: /* psllw */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0:                     return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP:    return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Udq_Ib, bRm);
                default:                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1886
1887
1888/** Opcode 0x0f 0x72 11/2. */
1889FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);
1890
1891/** Opcode 0x66 0x0f 0x72 11/2. */
1892FNIEMOP_STUB_1(iemOp_Grp13_psrld_Udq_Ib, uint8_t, bRm);
1893
1894/** Opcode 0x0f 0x72 11/4. */
1895FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);
1896
1897/** Opcode 0x66 0x0f 0x72 11/4. */
1898FNIEMOP_STUB_1(iemOp_Grp13_psrad_Udq_Ib, uint8_t, bRm);
1899
1900/** Opcode 0x0f 0x72 11/6. */
1901FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);
1902
1903/** Opcode 0x66 0x0f 0x72 11/6. */
1904FNIEMOP_STUB_1(iemOp_Grp13_pslld_Udq_Ib, uint8_t, bRm);
1905
1906
/**
 * Opcode 0x0f 0x72: group 13 - MMX/SSE shift-by-immediate on dwords.
 *
 * Register operand only (memory form is \#UD).  The /r field selects
 * psrld (/2), psrad (/4) or pslld (/6); the 0x66 prefix selects the SSE
 * (XMM) form, no prefix the MMX form, and REPZ/REPNZ are invalid.
 */
FNIEMOP_DEF(iemOp_Grp13)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* register form only */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 3: case 5: case 7:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2: /* psrld */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0:                     return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP:    return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Udq_Ib, bRm);
                default:                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 4: /* psrad */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0:                     return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP:    return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Udq_Ib, bRm);
                default:                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 6: /* pslld */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0:                     return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP:    return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Udq_Ib, bRm);
                default:                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1941
1942
1943/** Opcode 0x0f 0x73 11/2. */
1944FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);
1945
1946/** Opcode 0x66 0x0f 0x73 11/2. */
1947FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Udq_Ib, uint8_t, bRm);
1948
1949/** Opcode 0x66 0x0f 0x73 11/3. */
1950FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Udq_Ib, uint8_t, bRm);
1951
1952/** Opcode 0x0f 0x73 11/6. */
1953FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);
1954
1955/** Opcode 0x66 0x0f 0x73 11/6. */
1956FNIEMOP_STUB_1(iemOp_Grp14_psllq_Udq_Ib, uint8_t, bRm);
1957
1958/** Opcode 0x66 0x0f 0x73 11/7. */
1959FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Udq_Ib, uint8_t, bRm);
1960
1961
/**
 * Opcode 0x0f 0x73: group 14 - MMX/SSE shift-by-immediate on qwords/dqwords.
 *
 * Register operand only (memory form is \#UD).  The /r field selects
 * psrlq (/2), psrldq (/3, SSE only), psllq (/6) or pslldq (/7, SSE only);
 * the 0x66 prefix selects the SSE (XMM) form, no prefix the MMX form, and
 * REPZ/REPNZ are invalid.
 */
FNIEMOP_DEF(iemOp_Grp14)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* register form only */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 4: case 5:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2: /* psrlq */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0:                     return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP:    return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Udq_Ib, bRm);
                default:                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 3: /* psrldq - SSE form only */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case IEM_OP_PRF_SIZE_OP:    return FNIEMOP_CALL_1(iemOp_Grp14_psrldq_Udq_Ib, bRm);
                default:                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 6: /* psllq */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case 0:                     return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Nq_Ib, bRm);
                case IEM_OP_PRF_SIZE_OP:    return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Udq_Ib, bRm);
                default:                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        case 7: /* pslldq - SSE form only */
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
            {
                case IEM_OP_PRF_SIZE_OP:    return FNIEMOP_CALL_1(iemOp_Grp14_pslldq_Udq_Ib, bRm);
                default:                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2001
2002
/*
 * Stub declarations for two-byte opcodes 0x0f 0x74 through 0x0f 0x7f.
 * FNIEMOP_STUB / FNIEMOP_UD_STUB expand to placeholder decoder functions
 * defined by macros elsewhere in IEM -- these instructions are not
 * implemented yet (the UD variants presumably raise invalid opcode; see
 * the macro definitions to confirm).
 */
/** Opcode 0x0f 0x74. */
FNIEMOP_STUB(iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq);
/** Opcode 0x0f 0x75. */
FNIEMOP_STUB(iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq);
/** Opcode 0x0f 0x76.
 * NOTE(review): 'pcmped' looks like a typo for 'pcmpeqd' in this identifier;
 * left unchanged because the opcode dispatch table references this name. */
FNIEMOP_STUB(iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq);
/** Opcode 0x0f 0x77. */
FNIEMOP_STUB(iemOp_emms);
/** Opcode 0x0f 0x78. */
FNIEMOP_UD_STUB(iemOp_vmread_AmdGrp17);
/** Opcode 0x0f 0x79. */
FNIEMOP_UD_STUB(iemOp_vmwrite);
/** Opcode 0x0f 0x7c. */
FNIEMOP_STUB(iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps);
/** Opcode 0x0f 0x7d. */
FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps);
/** Opcode 0x0f 0x7e. */
FNIEMOP_STUB(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq);
/** Opcode 0x0f 0x7f. */
FNIEMOP_STUB(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq);
2023
2024
/** Opcode 0x0f 0x80 - jo Jv: jump near if overflow (OF=1), rel16/rel32. */
FNIEMOP_DEF(iemOp_jo_Jv)
{
    IEMOP_MNEMONIC("jo Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* 64-bit mode defaults to 64-bit operand size for near jumps. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32/64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2058
2059
/** Opcode 0x0f 0x81 - jno Jv: jump near if not overflow (OF=0), rel16/rel32. */
FNIEMOP_DEF(iemOp_jno_Jv)
{
    IEMOP_MNEMONIC("jno Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* 64-bit mode defaults to 64-bit operand size for near jumps. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted condition: OF set -> fall through; the jump is in the else arm. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2093
2094
/** Opcode 0x0f 0x82 - jc/jb/jnae Jv: jump near if carry (CF=1), rel16/rel32. */
FNIEMOP_DEF(iemOp_jc_Jv)
{
    IEMOP_MNEMONIC("jc/jb/jnae Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* 64-bit mode defaults to 64-bit operand size for near jumps. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2128
2129
/** Opcode 0x0f 0x83 - jnc/jnb/jae Jv: jump near if not carry (CF=0), rel16/rel32. */
FNIEMOP_DEF(iemOp_jnc_Jv)
{
    IEMOP_MNEMONIC("jnc/jnb/jae Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* 64-bit mode defaults to 64-bit operand size for near jumps. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted condition: CF set -> fall through; the jump is in the else arm. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2163
2164
/** Opcode 0x0f 0x84 - je/jz Jv: jump near if equal/zero (ZF=1), rel16/rel32. */
FNIEMOP_DEF(iemOp_je_Jv)
{
    IEMOP_MNEMONIC("je/jz Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* 64-bit mode defaults to 64-bit operand size for near jumps. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2198
2199
/** Opcode 0x0f 0x85 - jne/jnz Jv: jump near if not equal/not zero (ZF=0), rel16/rel32. */
FNIEMOP_DEF(iemOp_jne_Jv)
{
    IEMOP_MNEMONIC("jne/jnz Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* 64-bit mode defaults to 64-bit operand size for near jumps. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted condition: ZF set -> fall through; the jump is in the else arm. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2233
2234
/** Opcode 0x0f 0x86 - jbe/jna Jv: jump near if below or equal (CF=1 or ZF=1), rel16/rel32. */
FNIEMOP_DEF(iemOp_jbe_Jv)
{
    IEMOP_MNEMONIC("jbe/jna Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* 64-bit mode defaults to 64-bit operand size for near jumps. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2268
2269
/** Opcode 0x0f 0x87 - jnbe/ja Jv: jump near if above (CF=0 and ZF=0), rel16/rel32. */
FNIEMOP_DEF(iemOp_jnbe_Jv)
{
    IEMOP_MNEMONIC("jnbe/ja Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* 64-bit mode defaults to 64-bit operand size for near jumps. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted condition: CF or ZF set -> fall through; the jump is in the else arm. */
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2303
2304
/** Opcode 0x0f 0x88 - js Jv: jump near if sign (SF=1), rel16/rel32. */
FNIEMOP_DEF(iemOp_js_Jv)
{
    IEMOP_MNEMONIC("js Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* 64-bit mode defaults to 64-bit operand size for near jumps. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2338
2339
/** Opcode 0x0f 0x89 - jns Jv: jump near if not sign (SF=0), rel16/rel32. */
FNIEMOP_DEF(iemOp_jns_Jv)
{
    IEMOP_MNEMONIC("jns Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* 64-bit mode defaults to 64-bit operand size for near jumps. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted condition: SF set -> fall through; the jump is in the else arm. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2373
2374
/** Opcode 0x0f 0x8a - jp Jv: jump near if parity (PF=1), rel16/rel32. */
FNIEMOP_DEF(iemOp_jp_Jv)
{
    IEMOP_MNEMONIC("jp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* 64-bit mode defaults to 64-bit operand size for near jumps. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2408
2409
2410/** Opcode 0x0f 0x8b. */
2411FNIEMOP_DEF(iemOp_jnp_Jv)
2412{
2413 IEMOP_MNEMONIC("jo Jv");
2414 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2415 if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
2416 {
2417 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
2418 IEMOP_HLP_NO_LOCK_PREFIX();
2419
2420 IEM_MC_BEGIN(0, 0);
2421 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
2422 IEM_MC_ADVANCE_RIP();
2423 } IEM_MC_ELSE() {
2424 IEM_MC_REL_JMP_S16(i16Imm);
2425 } IEM_MC_ENDIF();
2426 IEM_MC_END();
2427 }
2428 else
2429 {
2430 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
2431 IEMOP_HLP_NO_LOCK_PREFIX();
2432
2433 IEM_MC_BEGIN(0, 0);
2434 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
2435 IEM_MC_ADVANCE_RIP();
2436 } IEM_MC_ELSE() {
2437 IEM_MC_REL_JMP_S32(i32Imm);
2438 } IEM_MC_ENDIF();
2439 IEM_MC_END();
2440 }
2441 return VINF_SUCCESS;
2442}
2443
2444
/** Opcode 0x0f 0x8c - jl/jnge Jv: jump near if less (SF != OF), rel16/rel32. */
FNIEMOP_DEF(iemOp_jl_Jv)
{
    IEMOP_MNEMONIC("jl/jnge Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* 64-bit mode defaults to 64-bit operand size for near jumps. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2478
2479
/** Opcode 0x0f 0x8d - jnl/jge Jv: jump near if greater or equal (SF == OF), rel16/rel32. */
FNIEMOP_DEF(iemOp_jnl_Jv)
{
    IEMOP_MNEMONIC("jnl/jge Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* 64-bit mode defaults to 64-bit operand size for near jumps. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted condition: SF != OF -> fall through; the jump is in the else arm. */
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2513
2514
/** Opcode 0x0f 0x8e - jle/jng Jv: jump near if less or equal (ZF=1 or SF != OF), rel16/rel32. */
FNIEMOP_DEF(iemOp_jle_Jv)
{
    IEMOP_MNEMONIC("jle/jng Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* 64-bit mode defaults to 64-bit operand size for near jumps. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2548
2549
/** Opcode 0x0f 0x8f - jnle/jg Jv: jump near if greater (ZF=0 and SF == OF), rel16/rel32. */
FNIEMOP_DEF(iemOp_jnle_Jv)
{
    IEMOP_MNEMONIC("jnle/jg Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* 64-bit mode defaults to 64-bit operand size for near jumps. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        /* Inverted condition: ZF set or SF != OF -> fall through; the jump is in the else arm. */
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2583
2584
/** Opcode 0x0f 0x90 - seto Eb: set byte to 1 if overflow (OF=1), else 0. */
FNIEMOP_DEF(iemOp_seto_Eb)
{
    IEMOP_MNEMONIC("seto Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2623
2624
/** Opcode 0x0f 0x91 - setno Eb: set byte to 1 if not overflow (OF=0), else 0. */
FNIEMOP_DEF(iemOp_setno_Eb)
{
    IEMOP_MNEMONIC("setno Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target; inverted condition: OF set -> store 0, clear -> store 1 */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2663
2664
/** Opcode 0x0f 0x92 - setc/setb/setnae Eb: set byte to 1 if carry (CF=1), else 0. */
FNIEMOP_DEF(iemOp_setc_Eb)
{
    IEMOP_MNEMONIC("setc Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2703
2704
/** Opcode 0x0f 0x93 - setnc/setnb/setae Eb: set byte to 1 if not carry (CF=0), else 0. */
FNIEMOP_DEF(iemOp_setnc_Eb)
{
    IEMOP_MNEMONIC("setnc Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target; inverted condition: CF set -> store 0, clear -> store 1 */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2743
2744
/** Opcode 0x0f 0x94 - sete/setz Eb: set byte to 1 if equal/zero (ZF=1), else 0. */
FNIEMOP_DEF(iemOp_sete_Eb)
{
    IEMOP_MNEMONIC("sete Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2783
2784
/** Opcode 0x0f 0x95 - setne/setnz Eb: set byte to 1 if not equal/not zero (ZF=0), else 0. */
FNIEMOP_DEF(iemOp_setne_Eb)
{
    IEMOP_MNEMONIC("setne Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target; inverted condition: ZF set -> store 0, clear -> store 1 */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2823
2824
/** Opcode 0x0f 0x96 - setbe/setna Eb: set byte to 1 if below or equal (CF=1 or ZF=1), else 0. */
FNIEMOP_DEF(iemOp_setbe_Eb)
{
    IEMOP_MNEMONIC("setbe Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2863
2864
/** Opcode 0x0f 0x97 - setnbe/seta Eb: set byte to 1 if above (CF=0 and ZF=0), else 0. */
FNIEMOP_DEF(iemOp_setnbe_Eb)
{
    IEMOP_MNEMONIC("setnbe Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target; inverted condition: CF or ZF set -> store 0, both clear -> store 1 */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2903
2904
/** Opcode 0x0f 0x98 - sets Eb: set byte to 1 if sign (SF=1), else 0. */
FNIEMOP_DEF(iemOp_sets_Eb)
{
    IEMOP_MNEMONIC("sets Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2943
2944
/** Opcode 0x0f 0x99 - setns Eb: set byte to 1 if not sign (SF=0), else 0. */
FNIEMOP_DEF(iemOp_setns_Eb)
{
    IEMOP_MNEMONIC("setns Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target; inverted condition: SF set -> store 0, clear -> store 1 */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2983
2984
2985/** Opcode 0x0f 0x9a. */
2986FNIEMOP_DEF(iemOp_setp_Eb)
2987{
2988 IEMOP_MNEMONIC("setnp Eb");
2989 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2990 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */
2991
2992 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
2993 * any way. AMD says it's "unused", whatever that means. We're
2994 * ignoring for now. */
2995 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2996 {
2997 /* register target */
2998 IEM_MC_BEGIN(0, 0);
2999 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3000 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
3001 } IEM_MC_ELSE() {
3002 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
3003 } IEM_MC_ENDIF();
3004 IEM_MC_ADVANCE_RIP();
3005 IEM_MC_END();
3006 }
3007 else
3008 {
3009 /* memory target */
3010 IEM_MC_BEGIN(0, 1);
3011 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3012 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
3013 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3014 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
3015 } IEM_MC_ELSE() {
3016 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
3017 } IEM_MC_ENDIF();
3018 IEM_MC_ADVANCE_RIP();
3019 IEM_MC_END();
3020 }
3021 return VINF_SUCCESS;
3022}
3023
3024
/** Opcode 0x0f 0x9b - setnp/setpo Eb: set byte to 1 if not parity (PF=0), else 0. */
FNIEMOP_DEF(iemOp_setnp_Eb)
{
    IEMOP_MNEMONIC("setnp Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target; inverted condition: PF set -> store 0, clear -> store 1 */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3063
3064
/** Opcode 0x0f 0x9c - setl/setnge Eb: set byte to 1 if less (SF != OF), else 0. */
FNIEMOP_DEF(iemOp_setl_Eb)
{
    IEMOP_MNEMONIC("setl Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3103
3104
/** Opcode 0x0f 0x9d - setnl/setge Eb: set byte to 1 if greater or equal (SF == OF), else 0. */
FNIEMOP_DEF(iemOp_setnl_Eb)
{
    IEMOP_MNEMONIC("setnl Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target; inverted condition: SF != OF -> store 0, SF == OF -> store 1 */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3143
3144
/** Opcode 0x0f 0x9e - setle/setng Eb: set byte to 1 if less or equal (ZF=1 or SF != OF), else 0. */
FNIEMOP_DEF(iemOp_setle_Eb)
{
    IEMOP_MNEMONIC("setle Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3183
3184
/** Opcode 0x0f 0x9f. */
FNIEMOP_DEF(iemOp_setnle_Eb)
{
    /* SETNLE/SETG: set the byte destination to 1 when ZF == 0 and SF == OF,
       else 0.  The IF macro tests the inverse condition (ZF set or SF != OF),
       hence the inverted 0/1 stores. */
    IEMOP_MNEMONIC("setnle Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3223
3224
/**
 * Common 'push segment-register' helper.
 *
 * Pushes the 16-bit selector value of @a iReg, zero-extended to the effective
 * operand size for the 32/64-bit cases.
 *
 * @param   iReg    The X86_SREG_XXX index of the segment register to push.
 *                  ES/CS/SS/DS (< FS) are invalid in 64-bit mode; FS/GS get
 *                  the default 64-bit operand size there.
 */
FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (iReg < X86_SREG_FS)
        IEMOP_HLP_NO_64BIT();       /* push es/cs/ss/ds are invalid in long mode */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_SREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
3267
3268
/** Opcode 0x0f 0xa0 - push fs; defers to the common segment-push helper. */
FNIEMOP_DEF(iemOp_push_fs)
{
    IEMOP_MNEMONIC("push fs");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
}
3276
3277
/** Opcode 0x0f 0xa1 - pop fs; deferred to a C implementation since segment
 *  register loads can fault / reload descriptors. */
FNIEMOP_DEF(iemOp_pop_fs)
{
    IEMOP_MNEMONIC("pop fs");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pIemCpu->enmEffOpSize);
}
3285
3286
/** Opcode 0x0f 0xa2 - cpuid; deferred entirely to a C implementation. */
FNIEMOP_DEF(iemOp_cpuid)
{
    IEMOP_MNEMONIC("cpuid");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
}
3294
3295
3296/**
3297 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
3298 * iemOp_bts_Ev_Gv.
3299 */
3300FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
3301{
3302 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3303 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
3304
3305 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3306 {
3307 /* register destination. */
3308 IEMOP_HLP_NO_LOCK_PREFIX();
3309 switch (pIemCpu->enmEffOpSize)
3310 {
3311 case IEMMODE_16BIT:
3312 IEM_MC_BEGIN(3, 0);
3313 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3314 IEM_MC_ARG(uint16_t, u16Src, 1);
3315 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3316
3317 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
3318 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
3319 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
3320 IEM_MC_REF_EFLAGS(pEFlags);
3321 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
3322
3323 IEM_MC_ADVANCE_RIP();
3324 IEM_MC_END();
3325 return VINF_SUCCESS;
3326
3327 case IEMMODE_32BIT:
3328 IEM_MC_BEGIN(3, 0);
3329 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3330 IEM_MC_ARG(uint32_t, u32Src, 1);
3331 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3332
3333 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
3334 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
3335 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
3336 IEM_MC_REF_EFLAGS(pEFlags);
3337 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
3338
3339 IEM_MC_ADVANCE_RIP();
3340 IEM_MC_END();
3341 return VINF_SUCCESS;
3342
3343 case IEMMODE_64BIT:
3344 IEM_MC_BEGIN(3, 0);
3345 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3346 IEM_MC_ARG(uint64_t, u64Src, 1);
3347 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3348
3349 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
3350 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
3351 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
3352 IEM_MC_REF_EFLAGS(pEFlags);
3353 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3354
3355 IEM_MC_ADVANCE_RIP();
3356 IEM_MC_END();
3357 return VINF_SUCCESS;
3358
3359 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3360 }
3361 }
3362 else
3363 {
3364 /* memory destination. */
3365
3366 uint32_t fAccess;
3367 if (pImpl->pfnLockedU16)
3368 fAccess = IEM_ACCESS_DATA_RW;
3369 else /* BT */
3370 {
3371 IEMOP_HLP_NO_LOCK_PREFIX();
3372 fAccess = IEM_ACCESS_DATA_R;
3373 }
3374
3375 /** @todo test negative bit offsets! */
3376 switch (pIemCpu->enmEffOpSize)
3377 {
3378 case IEMMODE_16BIT:
3379 IEM_MC_BEGIN(3, 2);
3380 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3381 IEM_MC_ARG(uint16_t, u16Src, 1);
3382 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3383 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3384 IEM_MC_LOCAL(int16_t, i16AddrAdj);
3385
3386 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
3387 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
3388 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
3389 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
3390 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
3391 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 1);
3392 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
3393 IEM_MC_FETCH_EFLAGS(EFlags);
3394
3395 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
3396 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
3397 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
3398 else
3399 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
3400 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
3401
3402 IEM_MC_COMMIT_EFLAGS(EFlags);
3403 IEM_MC_ADVANCE_RIP();
3404 IEM_MC_END();
3405 return VINF_SUCCESS;
3406
3407 case IEMMODE_32BIT:
3408 IEM_MC_BEGIN(3, 2);
3409 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3410 IEM_MC_ARG(uint32_t, u32Src, 1);
3411 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3412 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3413 IEM_MC_LOCAL(int32_t, i32AddrAdj);
3414
3415 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
3416 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
3417 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
3418 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
3419 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
3420 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
3421 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
3422 IEM_MC_FETCH_EFLAGS(EFlags);
3423
3424 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
3425 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
3426 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
3427 else
3428 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
3429 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
3430
3431 IEM_MC_COMMIT_EFLAGS(EFlags);
3432 IEM_MC_ADVANCE_RIP();
3433 IEM_MC_END();
3434 return VINF_SUCCESS;
3435
3436 case IEMMODE_64BIT:
3437 IEM_MC_BEGIN(3, 2);
3438 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3439 IEM_MC_ARG(uint64_t, u64Src, 1);
3440 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3441 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3442 IEM_MC_LOCAL(int64_t, i64AddrAdj);
3443
3444 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
3445 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
3446 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
3447 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
3448 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
3449 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
3450 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
3451 IEM_MC_FETCH_EFLAGS(EFlags);
3452
3453 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
3454 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
3455 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3456 else
3457 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
3458 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
3459
3460 IEM_MC_COMMIT_EFLAGS(EFlags);
3461 IEM_MC_ADVANCE_RIP();
3462 IEM_MC_END();
3463 return VINF_SUCCESS;
3464
3465 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3466 }
3467 }
3468}
3469
3470
3471/** Opcode 0x0f 0xa3. */
3472FNIEMOP_DEF(iemOp_bt_Ev_Gv)
3473{
3474 IEMOP_MNEMONIC("bt Gv,Gv");
3475 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
3476}
3477
3478
/**
 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
 *
 * Double-precision shift with an immediate shift count.  For the register
 * form the imm8 follows ModR/M directly; for the memory form it is fetched
 * after the displacement bytes consumed by IEM_MC_CALC_RM_EFF_ADDR.
 *
 * @param   pImpl   The SHLD or SHRD implementation table.
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form: immediate comes right after ModR/M. */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form: decode displacement first, then the imm8 count. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); /* imm8 follows the displacement */
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); /* imm8 follows the displacement */
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); /* imm8 follows the displacement */
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3622
3623
/**
 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
 *
 * Double-precision shift where the count comes from the CL register
 * (fetched via X86_GREG_xCX low byte) rather than an immediate.
 *
 * @param   pImpl   The SHLD or SHRD implementation table.
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form. */
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* count = CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* count = CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* count = CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* count = CL */
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* count = CL */
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* count = CL */
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3766
3767
3768
/** Opcode 0x0f 0xa4 - shld Ev,Gv,Ib; common immediate-count worker. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC("shld Ev,Gv,Ib");
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
}
3775
3776
/** Opcode 0x0f 0xa5 - shld Ev,Gv,CL; common CL-count worker.
 *  (Comment previously said 0xa7, but SHLD Ev,Gv,CL encodes as 0F A5.) */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
{
    IEMOP_MNEMONIC("shld Ev,Gv,CL");
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
}
3783
3784
/** Opcode 0x0f 0xa8 - push gs; defers to the common segment-push helper. */
FNIEMOP_DEF(iemOp_push_gs)
{
    IEMOP_MNEMONIC("push gs");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
}
3792
3793
/** Opcode 0x0f 0xa9 - pop gs; deferred to the C implementation that handles
 *  segment register loads. */
FNIEMOP_DEF(iemOp_pop_gs)
{
    IEMOP_MNEMONIC("pop gs");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pIemCpu->enmEffOpSize);
}
3801
3802
/** Opcode 0x0f 0xaa - rsm (resume from SMM); not implemented yet (stub). */
FNIEMOP_STUB(iemOp_rsm);
3805
3806
/** Opcode 0x0f 0xab - bts Ev,Gv; defers to the common bit-op worker. */
FNIEMOP_DEF(iemOp_bts_Ev_Gv)
{
    IEMOP_MNEMONIC("bts Ev,Gv");
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
}
3813
3814
/** Opcode 0x0f 0xac - shrd Ev,Gv,Ib; common immediate-count worker. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC("shrd Ev,Gv,Ib");
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
}
3821
3822
/** Opcode 0x0f 0xad - shrd Ev,Gv,CL; common CL-count worker. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
{
    IEMOP_MNEMONIC("shrd Ev,Gv,CL");
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
}
3829
3830
3831/** Opcode 0x0f 0xae mem/0. */
3832FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
3833{
3834 IEMOP_MNEMONIC("fxsave m512");
3835 IEMOP_HLP_NO_LOCK_PREFIX();
3836 if (!IEM_IS_INTEL_CPUID_FEATURE_PRESENT_EDX(X86_CPUID_FEATURE_EDX_FXSR))
3837 return IEMOP_RAISE_INVALID_LOCK_PREFIX();
3838
3839 IEM_MC_BEGIN(3, 1);
3840 IEM_MC_ARG_CONST(uint8_t, iEffSeg,/*=*/pIemCpu->iEffSeg, 0);
3841 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
3842 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 2);
3843 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm);
3844 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
3845 IEM_MC_END();
3846 return VINF_SUCCESS;
3847}
3848
3849
3850/** Opcode 0x0f 0xae mem/1. */
3851FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
3852{
3853 IEMOP_MNEMONIC("fxrstor m512");
3854 IEMOP_HLP_NO_LOCK_PREFIX();
3855 if (!IEM_IS_INTEL_CPUID_FEATURE_PRESENT_EDX(X86_CPUID_FEATURE_EDX_FXSR))
3856 return IEMOP_RAISE_INVALID_LOCK_PREFIX();
3857
3858 IEM_MC_BEGIN(3, 1);
3859 IEM_MC_ARG_CONST(uint8_t, iEffSeg,/*=*/pIemCpu->iEffSeg, 0);
3860 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
3861 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 2);
3862 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm);
3863 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
3864 IEM_MC_END();
3865 return VINF_SUCCESS;
3866}
3867
3868
/*
 * Remaining Grp15 (0x0f 0xae) forms.  FNIEMOP_STUB_1 marks not-yet-implemented
 * instructions; FNIEMOP_UD_STUB_1 marks forms that decode to invalid opcode
 * (#UD) - presumably because the feature is not advertised; confirm against
 * the macro definitions.
 */

/** Opcode 0x0f 0xae mem/2. */
FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/3. */
FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/4. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/5. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/7. */
FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);

/** Opcode 0x0f 0xae 11b/5. */
FNIEMOP_STUB_1(iemOp_Grp15_lfence, uint8_t, bRm);

/** Opcode 0x0f 0xae 11b/6. */
FNIEMOP_STUB_1(iemOp_Grp15_mfence, uint8_t, bRm);

/** Opcode 0x0f 0xae 11b/7. */
FNIEMOP_STUB_1(iemOp_Grp15_sfence, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/0. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/1. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/2. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/3. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
3907
3908
/** Opcode 0x0f 0xae - Grp15 dispatcher.
 *
 * Memory forms dispatch purely on the ModR/M 'reg' field; register forms
 * (mod == 3) additionally dispatch on the repeat/size/lock prefixes: no
 * prefix selects the fence instructions, F3 (REPZ) selects the
 * rd/wr-fsbase/gsbase group, everything else is invalid opcode.
 */
FNIEMOP_DEF(iemOp_Grp15)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Memory forms: reg field selects the instruction. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt,bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Register forms: prefix bytes pick the sub-table. */
        switch (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
        {
            case 0: /* no relevant prefix: fences */
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return IEMOP_RAISE_INVALID_OPCODE();
                    case 1: return IEMOP_RAISE_INVALID_OPCODE();
                    case 2: return IEMOP_RAISE_INVALID_OPCODE();
                    case 3: return IEMOP_RAISE_INVALID_OPCODE();
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
                    case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
                    case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - every case above returns */

            case IEM_OP_PRF_REPZ: /* F3 prefix: fs/gs base access */
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
                    case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
                    case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
                    case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return IEMOP_RAISE_INVALID_OPCODE();
                    case 6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - every case above returns */

            default:
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
}
3967
3968
/** Opcode 0x0f 0xaf - imul Gv,Ev (two-operand form); reuses the generic
 *  r,rm binary-operator worker with the two-operand imul implementation. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev)
{
    IEMOP_MNEMONIC("imul Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
}
3976
3977
/** Opcode 0x0f 0xb0 - cmpxchg Eb,Gb.
 *
 * Compares AL with the destination; on equality the source byte is stored in
 * the destination, otherwise the destination is loaded into AL.  Both
 * outcomes are handled inside the assembly helper via the AL reference.
 */
FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
{
    IEMOP_MNEMONIC("cmpxchg Eb,Gb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: operate directly on register references. */
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_BEGIN(4, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG(uint32_t *, pEFlags, 3);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: map the byte RW, use a local AL copy and write
           it back afterwards (unconditional store; the helper only changes
           the local when the compare fails). */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Al, 1);
        IEM_MC_ARG(uint8_t, u8Src, 2);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, u8Al);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_REF_LOCAL(pu8Al, u8Al);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al); /* write back the (possibly updated) AL copy */
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4035
/** Opcode 0x0f 0xb1. */
FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
{
    IEMOP_MNEMONIC("cmpxchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Register destination: no more opcode bytes follow the ModR/M byte.
     * rAX is both an implicit input (compare value) and output (receives the
     * destination value on mismatch) of the assembly worker.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                /* Dispatch to the locked worker when a LOCK prefix is present. */
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                /* The worker wrote thru the 32-bit references; clear the upper
                   halves explicitly since 32-bit GPR writes zero-extend. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                /* On 32-bit hosts the 64-bit source is passed by reference
                   (presumably it cannot be passed by value to the assembly
                   worker there -- see the matching #ifdef below). */
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * Memory destination: map the operand read/write, keep rAX in a local
         * copy for the worker and commit it back to the register afterwards.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Ax, 1);
                IEM_MC_ARG(uint16_t, u16Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint16_t, u16Ax);

                /* Effective address bytes must be consumed before decoding is done. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                /* Write back the (possibly updated) accumulator copy. */
                IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Eax, 1);
                IEM_MC_ARG(uint32_t, u32Src, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint32_t, u32Eax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                /* IEM_MC_STORE_GREG_U32 handles the zero-extension to 64-bit. */
                IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Rax, 1);
#ifdef RT_ARCH_X86
                /* 32-bit host: 64-bit source passed by reference, see above. */
                IEM_MC_ARG(uint64_t *, pu64Src, 2);
#else
                IEM_MC_ARG(uint64_t, u64Src, 2);
#endif
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint64_t, u64Rax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4228
4229
/**
 * Common worker for LSS/LFS/LGS (and kin): loads a far pointer from memory
 * into the segment register @a iSegReg and the ModR/M reg-field GPR.
 *
 * The offset part is read at the effective address, the 16-bit selector just
 * after it (at operand-size displacement); the actual register/descriptor
 * loading is deferred to iemCImpl_load_SReg_Greg.
 *
 * @param   iSegReg     The segment register to load (X86_SREG_XXX).
 */
FNIEMOP_DEF_1(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /* The source cannot be a register. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE();
    uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint16_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm);
            /* 16-bit offset followed by the selector word. */
            IEM_MC_FETCH_MEM_U16(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 2);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint32_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm);
            /* 32-bit offset, selector at +4. */
            IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 4);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t, uSel, 0);
            IEM_MC_ARG(uint64_t, offSeg, 1);
            IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2);
            IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm);
            /* 64-bit offset, selector at +8. */
            IEM_MC_FETCH_MEM_U64(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 8);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4290
4291
/** Opcode 0x0f 0xb2. LSS - load far pointer into SS and a GPR. */
FNIEMOP_DEF(iemOp_lss_Gv_Mp)
{
    IEMOP_MNEMONIC("lss Gv,Mp");
    return FNIEMOP_CALL_1(iemOpCommonLoadSRegAndGreg, X86_SREG_SS);
}
4298
4299
/** Opcode 0x0f 0xb3. BTR - bit test and reset, via the common bit worker. */
FNIEMOP_DEF(iemOp_btr_Ev_Gv)
{
    IEMOP_MNEMONIC("btr Ev,Gv");
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
}
4306
4307
/** Opcode 0x0f 0xb4. LFS - load far pointer into FS and a GPR. */
FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
{
    IEMOP_MNEMONIC("lfs Gv,Mp");
    return FNIEMOP_CALL_1(iemOpCommonLoadSRegAndGreg, X86_SREG_FS);
}
4314
4315
/** Opcode 0x0f 0xb5. LGS - load far pointer into GS and a GPR. */
FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
{
    IEMOP_MNEMONIC("lgs Gv,Mp");
    return FNIEMOP_CALL_1(iemOpCommonLoadSRegAndGreg, X86_SREG_GS);
}
4322
4323
/** Opcode 0x0f 0xb6. MOVZX Gv,Eb - zero-extend a byte into a 16/32/64-bit GPR. */
FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
{
    IEMOP_MNEMONIC("movzx Gv,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                /* Fetch-and-zero-extend the source byte register in one go. */
                IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4413
4414
/** Opcode 0x0f 0xb7. MOVZX Gv,Ew - zero-extend a word into a 32/64-bit GPR.
 * Only two result widths are handled: 64-bit when REX.W is in effect,
 * otherwise 32-bit (16-bit operand size behaves like 32-bit here). */
FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
{
    IEMOP_MNEMONIC("movzx Gv,Ew");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
            IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
            IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
4480
4481
/** Opcode 0x0f 0xb8.
 * Not implemented yet (FNIEMOP_STUB); the name suggests this slot covers
 * POPCNT (F3 prefix) and the IA-64 JMPE encoding -- confirm before implementing. */
FNIEMOP_STUB(iemOp_popcnt_Gv_Ev_jmpe);
4484
4485
/** Opcode 0x0f 0xb9. Group 10 (UD1) - always raises \#UD. */
FNIEMOP_DEF(iemOp_Grp10)
{
    Log(("iemOp_Grp10 -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
4492
4493
4494/** Opcode 0x0f 0xba. */
4495FNIEMOP_DEF(iemOp_Grp8)
4496{
4497 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4498 PCIEMOPBINSIZES pImpl;
4499 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
4500 {
4501 case 0: case 1: case 2: case 3:
4502 return IEMOP_RAISE_INVALID_OPCODE();
4503 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC("bt Ev,Ib"); break;
4504 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC("bts Ev,Ib"); break;
4505 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC("btr Ev,Ib"); break;
4506 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC("btc Ev,Ib"); break;
4507 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4508 }
4509 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
4510
4511 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4512 {
4513 /* register destination. */
4514 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
4515 IEMOP_HLP_NO_LOCK_PREFIX();
4516
4517 switch (pIemCpu->enmEffOpSize)
4518 {
4519 case IEMMODE_16BIT:
4520 IEM_MC_BEGIN(3, 0);
4521 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4522 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1);
4523 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4524
4525 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4526 IEM_MC_REF_EFLAGS(pEFlags);
4527 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4528
4529 IEM_MC_ADVANCE_RIP();
4530 IEM_MC_END();
4531 return VINF_SUCCESS;
4532
4533 case IEMMODE_32BIT:
4534 IEM_MC_BEGIN(3, 0);
4535 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4536 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1);
4537 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4538
4539 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4540 IEM_MC_REF_EFLAGS(pEFlags);
4541 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4542
4543 IEM_MC_ADVANCE_RIP();
4544 IEM_MC_END();
4545 return VINF_SUCCESS;
4546
4547 case IEMMODE_64BIT:
4548 IEM_MC_BEGIN(3, 0);
4549 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4550 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1);
4551 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4552
4553 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4554 IEM_MC_REF_EFLAGS(pEFlags);
4555 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4556
4557 IEM_MC_ADVANCE_RIP();
4558 IEM_MC_END();
4559 return VINF_SUCCESS;
4560
4561 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4562 }
4563 }
4564 else
4565 {
4566 /* memory destination. */
4567
4568 uint32_t fAccess;
4569 if (pImpl->pfnLockedU16)
4570 fAccess = IEM_ACCESS_DATA_RW;
4571 else /* BT */
4572 {
4573 IEMOP_HLP_NO_LOCK_PREFIX();
4574 fAccess = IEM_ACCESS_DATA_R;
4575 }
4576
4577 /** @todo test negative bit offsets! */
4578 switch (pIemCpu->enmEffOpSize)
4579 {
4580 case IEMMODE_16BIT:
4581 IEM_MC_BEGIN(3, 1);
4582 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4583 IEM_MC_ARG(uint16_t, u16Src, 1);
4584 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4585 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4586
4587 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
4588 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
4589 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
4590 IEM_MC_FETCH_EFLAGS(EFlags);
4591 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4592 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4593 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4594 else
4595 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
4596 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
4597
4598 IEM_MC_COMMIT_EFLAGS(EFlags);
4599 IEM_MC_ADVANCE_RIP();
4600 IEM_MC_END();
4601 return VINF_SUCCESS;
4602
4603 case IEMMODE_32BIT:
4604 IEM_MC_BEGIN(3, 1);
4605 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4606 IEM_MC_ARG(uint32_t, u32Src, 1);
4607 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4608 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4609
4610 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
4611 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
4612 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
4613 IEM_MC_FETCH_EFLAGS(EFlags);
4614 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4615 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4616 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4617 else
4618 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
4619 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
4620
4621 IEM_MC_COMMIT_EFLAGS(EFlags);
4622 IEM_MC_ADVANCE_RIP();
4623 IEM_MC_END();
4624 return VINF_SUCCESS;
4625
4626 case IEMMODE_64BIT:
4627 IEM_MC_BEGIN(3, 1);
4628 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4629 IEM_MC_ARG(uint64_t, u64Src, 1);
4630 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4631 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4632
4633 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
4634 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
4635 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
4636 IEM_MC_FETCH_EFLAGS(EFlags);
4637 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4638 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4639 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4640 else
4641 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
4642 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
4643
4644 IEM_MC_COMMIT_EFLAGS(EFlags);
4645 IEM_MC_ADVANCE_RIP();
4646 IEM_MC_END();
4647 return VINF_SUCCESS;
4648
4649 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4650 }
4651 }
4652
4653}
4654
4655
/** Opcode 0x0f 0xbb. BTC - bit test and complement, via the common bit worker. */
FNIEMOP_DEF(iemOp_btc_Ev_Gv)
{
    IEMOP_MNEMONIC("btc Ev,Gv");
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
}
4662
4663
/** Opcode 0x0f 0xbc. BSF - bit scan forward.
 * All flags except ZF are architecturally undefined; tell the verifier so. */
FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
{
    IEMOP_MNEMONIC("bsf Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
}
4671
4672
/** Opcode 0x0f 0xbd. BSR - bit scan reverse.
 * All flags except ZF are architecturally undefined; tell the verifier so. */
FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
{
    IEMOP_MNEMONIC("bsr Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
}
4680
4681
/** Opcode 0x0f 0xbe. MOVSX Gv,Eb - sign-extend a byte into a 16/32/64-bit GPR. */
FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
{
    IEMOP_MNEMONIC("movsx Gv,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                /* Fetch-and-sign-extend the source byte register in one go. */
                IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4771
4772
/** Opcode 0x0f 0xbf. MOVSX Gv,Ew - sign-extend a word into a 32/64-bit GPR.
 * Only two result widths are handled: 64-bit when REX.W is in effect,
 * otherwise 32-bit (16-bit operand size behaves like 32-bit here). */
FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
{
    IEMOP_MNEMONIC("movsx Gv,Ew");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
            IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
            IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
4838
4839
/** Opcode 0x0f 0xc0. XADD Eb,Gb - exchange and add byte.
 * The worker adds the reg operand into the destination and leaves the old
 * destination value in the reg operand. */
FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC("xadd Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.  The reg operand is worked on via a local
         * copy which is written back to the register after the call.
         */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(uint8_t, u8RegCopy);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* LOCK prefix selects the atomic worker. */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8RegCopy);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
    return VINF_SUCCESS;
}
4897
4898
/** Opcode 0x0f 0xc1. XADD Ev,Gv - exchange and add word/dword/qword.
 * The worker adds the reg operand into the destination and leaves the old
 * destination value in the reg operand. */
FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
{
    IEMOP_MNEMONIC("xadd Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.  The reg operand is worked on via a local
         * copy which is written back to the register after the call.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                /* LOCK prefix selects the atomic worker. */
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                /* IEM_MC_STORE_GREG_U32 handles the zero-extension to 64-bit. */
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5048
/* SSE/SSE2 compare, insert/extract and shuffle opcodes 0x0f 0xc2..0xc6.
   All still unimplemented; FNIEMOP_STUB expands to a decoder that reports
   the instruction as not implemented (presumably VERR_IEM_INSTR_NOT_IMPLEMENTED
   -- confirm against the FNIEMOP_STUB definition). */

/** Opcode 0x0f 0xc2. */
FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib);

/** Opcode 0x0f 0xc3. */
FNIEMOP_STUB(iemOp_movnti_My_Gy);

/** Opcode 0x0f 0xc4. */
FNIEMOP_STUB(iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib);

/** Opcode 0x0f 0xc5. */
FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib);

/** Opcode 0x0f 0xc6. */
FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib);
5064
5065/** Opcode 0x0f 0xc7 !11/1. */
5066FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
5067{
5068 IEMOP_MNEMONIC("cmpxchg8b Mq");
5069
5070 IEM_MC_BEGIN(4, 3);
5071 IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
5072 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
5073 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
5074 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
5075 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
5076 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
5077 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5078
5079 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
5080 IEMOP_HLP_DONE_DECODING();
5081 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
5082
5083 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
5084 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
5085 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
5086
5087 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
5088 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
5089 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
5090
5091 IEM_MC_FETCH_EFLAGS(EFlags);
5092 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
5093 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
5094 else
5095 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
5096
5097 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
5098 IEM_MC_COMMIT_EFLAGS(EFlags);
5099 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
5100 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
5101 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
5102 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
5103 IEM_MC_ENDIF();
5104 IEM_MC_ADVANCE_RIP();
5105
5106 IEM_MC_END();
5107 return VINF_SUCCESS;
5108}
5109
5110
/* Remaining group 9 encodings: cmpxchg16b, rdrand and the VMX pointer
   instructions.  FNIEMOP_UD_STUB_1 makes these raise #UD until properly
   implemented. */

/** Opcode REX.W 0x0f 0xc7 !11/1. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm);

/** Opcode 0x0f 0xc7 11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);

/** Opcode 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);

/** Opcode 0x66 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);

/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
5128
5129
5130/** Opcode 0x0f 0xc7. */
5131FNIEMOP_DEF(iemOp_Grp9)
5132{
5133 /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
5134 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5135 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5136 {
5137 case 0: case 2: case 3: case 4: case 5:
5138 return IEMOP_RAISE_INVALID_OPCODE();
5139 case 1:
5140 /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
5141 if ( (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
5142 || (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
5143 return IEMOP_RAISE_INVALID_OPCODE();
5144 if (bRm & IEM_OP_PRF_SIZE_REX_W)
5145 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
5146 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
5147 case 6:
5148 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5149 return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
5150 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
5151 {
5152 case 0:
5153 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
5154 case IEM_OP_PRF_SIZE_OP:
5155 return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
5156 case IEM_OP_PRF_REPZ:
5157 return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
5158 default:
5159 return IEMOP_RAISE_INVALID_OPCODE();
5160 }
5161 case 7:
5162 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
5163 {
5164 case 0:
5165 case IEM_OP_PRF_REPZ:
5166 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
5167 default:
5168 return IEMOP_RAISE_INVALID_OPCODE();
5169 }
5170 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5171 }
5172}
5173
5174
5175/**
5176 * Common 'bswap register' helper.
5177 */
5178FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
5179{
5180 IEMOP_HLP_NO_LOCK_PREFIX();
5181 switch (pIemCpu->enmEffOpSize)
5182 {
5183 case IEMMODE_16BIT:
5184 IEM_MC_BEGIN(1, 0);
5185 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5186 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
5187 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
5188 IEM_MC_ADVANCE_RIP();
5189 IEM_MC_END();
5190 return VINF_SUCCESS;
5191
5192 case IEMMODE_32BIT:
5193 IEM_MC_BEGIN(1, 0);
5194 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5195 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
5196 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5197 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
5198 IEM_MC_ADVANCE_RIP();
5199 IEM_MC_END();
5200 return VINF_SUCCESS;
5201
5202 case IEMMODE_64BIT:
5203 IEM_MC_BEGIN(1, 0);
5204 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5205 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
5206 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
5207 IEM_MC_ADVANCE_RIP();
5208 IEM_MC_END();
5209 return VINF_SUCCESS;
5210
5211 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5212 }
5213}
5214
5215
5216/** Opcode 0x0f 0xc8. */
5217FNIEMOP_DEF(iemOp_bswap_rAX_r8)
5218{
5219 IEMOP_MNEMONIC("bswap rAX/r8");
5220 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pIemCpu->uRexReg);
5221}
5222
5223
5224/** Opcode 0x0f 0xc9. */
5225FNIEMOP_DEF(iemOp_bswap_rCX_r9)
5226{
5227 IEMOP_MNEMONIC("bswap rCX/r9");
5228 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pIemCpu->uRexReg);
5229}
5230
5231
5232/** Opcode 0x0f 0xca. */
5233FNIEMOP_DEF(iemOp_bswap_rDX_r10)
5234{
5235 IEMOP_MNEMONIC("bswap rDX/r9");
5236 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pIemCpu->uRexReg);
5237}
5238
5239
5240/** Opcode 0x0f 0xcb. */
5241FNIEMOP_DEF(iemOp_bswap_rBX_r11)
5242{
5243 IEMOP_MNEMONIC("bswap rBX/r9");
5244 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pIemCpu->uRexReg);
5245}
5246
5247
5248/** Opcode 0x0f 0xcc. */
5249FNIEMOP_DEF(iemOp_bswap_rSP_r12)
5250{
5251 IEMOP_MNEMONIC("bswap rSP/r12");
5252 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pIemCpu->uRexReg);
5253}
5254
5255
5256/** Opcode 0x0f 0xcd. */
5257FNIEMOP_DEF(iemOp_bswap_rBP_r13)
5258{
5259 IEMOP_MNEMONIC("bswap rBP/r13");
5260 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pIemCpu->uRexReg);
5261}
5262
5263
5264/** Opcode 0x0f 0xce. */
5265FNIEMOP_DEF(iemOp_bswap_rSI_r14)
5266{
5267 IEMOP_MNEMONIC("bswap rSI/r14");
5268 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pIemCpu->uRexReg);
5269}
5270
5271
5272/** Opcode 0x0f 0xcf. */
5273FNIEMOP_DEF(iemOp_bswap_rDI_r15)
5274{
5275 IEMOP_MNEMONIC("bswap rDI/r15");
5276 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pIemCpu->uRexReg);
5277}
5278
5279
5280
/* MMX/SSE packed arithmetic, logical, shift and move opcodes 0x0f 0xd0
   through 0x0f 0xfe.  All still unimplemented stubs.  (Several identifiers
   carry historical typos - e.g. "pamxub", "psbuq" - which must be kept in
   sync with g_apfnTwoByteMap below.) */

/** Opcode 0x0f 0xd0. */
FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps);
/** Opcode 0x0f 0xd1. */
FNIEMOP_STUB(iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq);
/** Opcode 0x0f 0xd2. */
FNIEMOP_STUB(iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq);
/** Opcode 0x0f 0xd3. */
FNIEMOP_STUB(iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq);
/** Opcode 0x0f 0xd4. */
FNIEMOP_STUB(iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq);
/** Opcode 0x0f 0xd5. */
FNIEMOP_STUB(iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq);
/** Opcode 0x0f 0xd6. */
FNIEMOP_STUB(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq);
/** Opcode 0x0f 0xd7. */
FNIEMOP_STUB(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq);
/** Opcode 0x0f 0xd8. */
FNIEMOP_STUB(iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq);
/** Opcode 0x0f 0xd9. */
FNIEMOP_STUB(iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq);
/** Opcode 0x0f 0xda. */
FNIEMOP_STUB(iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq);
/** Opcode 0x0f 0xdb. */
FNIEMOP_STUB(iemOp_pand_Pq_Qq__pand_Vdq_Wdq);
/** Opcode 0x0f 0xdc. */
FNIEMOP_STUB(iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq);
/** Opcode 0x0f 0xdd. */
FNIEMOP_STUB(iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq);
/** Opcode 0x0f 0xde. */
FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq);
/** Opcode 0x0f 0xdf. */
FNIEMOP_STUB(iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq);
/** Opcode 0x0f 0xe0. */
FNIEMOP_STUB(iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq);
/** Opcode 0x0f 0xe1. */
FNIEMOP_STUB(iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq);
/** Opcode 0x0f 0xe2. */
FNIEMOP_STUB(iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq);
/** Opcode 0x0f 0xe3. */
FNIEMOP_STUB(iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq);
/** Opcode 0x0f 0xe4. */
FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq);
/** Opcode 0x0f 0xe5. */
FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq);
/** Opcode 0x0f 0xe6. */
FNIEMOP_STUB(iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd);
/** Opcode 0x0f 0xe7. */
FNIEMOP_STUB(iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq);
/** Opcode 0x0f 0xe8. */
FNIEMOP_STUB(iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq);
/** Opcode 0x0f 0xe9. */
FNIEMOP_STUB(iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq);
/** Opcode 0x0f 0xea. */
FNIEMOP_STUB(iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq);
/** Opcode 0x0f 0xeb. */
FNIEMOP_STUB(iemOp_por_Pq_Qq__por_Vdq_Wdq);
/** Opcode 0x0f 0xec. */
FNIEMOP_STUB(iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq);
/** Opcode 0x0f 0xed. */
FNIEMOP_STUB(iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq);
/** Opcode 0x0f 0xee. */
FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq);
/** Opcode 0x0f 0xef. */
FNIEMOP_STUB(iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq);
/** Opcode 0x0f 0xf0. */
FNIEMOP_STUB(iemOp_lddqu_Vdq_Mdq);
/** Opcode 0x0f 0xf1. */
FNIEMOP_STUB(iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq);
/** Opcode 0x0f 0xf2. */
FNIEMOP_STUB(iemOp_psld_Pq_Qq__pslld_Vdq_Wdq);
/** Opcode 0x0f 0xf3. */
FNIEMOP_STUB(iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq);
/** Opcode 0x0f 0xf4. */
FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq);
/** Opcode 0x0f 0xf5. */
FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq);
/** Opcode 0x0f 0xf6. */
FNIEMOP_STUB(iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq);
/** Opcode 0x0f 0xf7. */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq);
/** Opcode 0x0f 0xf8. */
FNIEMOP_STUB(iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq);
/** Opcode 0x0f 0xf9. */
FNIEMOP_STUB(iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq);
/** Opcode 0x0f 0xfa. */
FNIEMOP_STUB(iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq);
/** Opcode 0x0f 0xfb. */
FNIEMOP_STUB(iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq);
/** Opcode 0x0f 0xfc. */
FNIEMOP_STUB(iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq);
/** Opcode 0x0f 0xfd. */
FNIEMOP_STUB(iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq);
/** Opcode 0x0f 0xfe. */
FNIEMOP_STUB(iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq);
5375
5376
/**
 * The two byte opcode map (the byte following the 0x0f escape).
 *
 * Indexed directly by the second opcode byte.  Operand-size / repeat prefix
 * selection (0x66, 0xf3, 0xf2) is resolved inside the individual decoder
 * functions, not here.
 *
 * Note: the 0xbb slot comment previously read "0xbd" by mistake; the table
 * contents themselves are in the correct order (0xba Grp8, 0xbb btc,
 * 0xbc bsf, 0xbd bsr).
 */
const PFNIEMOP g_apfnTwoByteMap[256] =
{
    /* 0x00 */ iemOp_Grp6,
    /* 0x01 */ iemOp_Grp7,
    /* 0x02 */ iemOp_lar_Gv_Ew,
    /* 0x03 */ iemOp_lsl_Gv_Ew,
    /* 0x04 */ iemOp_Invalid,
    /* 0x05 */ iemOp_syscall,
    /* 0x06 */ iemOp_clts,
    /* 0x07 */ iemOp_sysret,
    /* 0x08 */ iemOp_invd,
    /* 0x09 */ iemOp_wbinvd,
    /* 0x0a */ iemOp_Invalid,
    /* 0x0b */ iemOp_ud2,
    /* 0x0c */ iemOp_Invalid,
    /* 0x0d */ iemOp_nop_Ev_GrpP,
    /* 0x0e */ iemOp_femms,
    /* 0x0f */ iemOp_3Dnow,
    /* 0x10 */ iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd,
    /* 0x11 */ iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd,
    /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq,
    /* 0x13 */ iemOp_movlps_Mq_Vq__movlpd_Mq_Vq,
    /* 0x14 */ iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq,
    /* 0x15 */ iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq,
    /* 0x16 */ iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq,
    /* 0x17 */ iemOp_movhps_Mq_Vq__movhpd_Mq_Vq,
    /* 0x18 */ iemOp_prefetch_Grp16,
    /* 0x19 */ iemOp_nop_Ev,
    /* 0x1a */ iemOp_nop_Ev,
    /* 0x1b */ iemOp_nop_Ev,
    /* 0x1c */ iemOp_nop_Ev,
    /* 0x1d */ iemOp_nop_Ev,
    /* 0x1e */ iemOp_nop_Ev,
    /* 0x1f */ iemOp_nop_Ev,
    /* 0x20 */ iemOp_mov_Rd_Cd,
    /* 0x21 */ iemOp_mov_Rd_Dd,
    /* 0x22 */ iemOp_mov_Cd_Rd,
    /* 0x23 */ iemOp_mov_Dd_Rd,
    /* 0x24 */ iemOp_mov_Rd_Td,
    /* 0x25 */ iemOp_Invalid,
    /* 0x26 */ iemOp_mov_Td_Rd,
    /* 0x27 */ iemOp_Invalid,
    /* 0x28 */ iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd,
    /* 0x29 */ iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd,
    /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey,
    /* 0x2b */ iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd,
    /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd,
    /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd,
    /* 0x2e */ iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd,
    /* 0x2f */ iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd,
    /* 0x30 */ iemOp_wrmsr,
    /* 0x31 */ iemOp_rdtsc,
    /* 0x32 */ iemOp_rdmsr,
    /* 0x33 */ iemOp_rdpmc,
    /* 0x34 */ iemOp_sysenter,
    /* 0x35 */ iemOp_sysexit,
    /* 0x36 */ iemOp_Invalid,
    /* 0x37 */ iemOp_getsec,
    /* 0x38 */ iemOp_3byte_Esc_A4,
    /* 0x39 */ iemOp_Invalid,
    /* 0x3a */ iemOp_3byte_Esc_A5,
    /* 0x3b */ iemOp_Invalid,
    /* 0x3c */ iemOp_movnti_Gv_Ev/*??*/,
    /* 0x3d */ iemOp_Invalid,
    /* 0x3e */ iemOp_Invalid,
    /* 0x3f */ iemOp_Invalid,
    /* 0x40 */ iemOp_cmovo_Gv_Ev,
    /* 0x41 */ iemOp_cmovno_Gv_Ev,
    /* 0x42 */ iemOp_cmovc_Gv_Ev,
    /* 0x43 */ iemOp_cmovnc_Gv_Ev,
    /* 0x44 */ iemOp_cmove_Gv_Ev,
    /* 0x45 */ iemOp_cmovne_Gv_Ev,
    /* 0x46 */ iemOp_cmovbe_Gv_Ev,
    /* 0x47 */ iemOp_cmovnbe_Gv_Ev,
    /* 0x48 */ iemOp_cmovs_Gv_Ev,
    /* 0x49 */ iemOp_cmovns_Gv_Ev,
    /* 0x4a */ iemOp_cmovp_Gv_Ev,
    /* 0x4b */ iemOp_cmovnp_Gv_Ev,
    /* 0x4c */ iemOp_cmovl_Gv_Ev,
    /* 0x4d */ iemOp_cmovnl_Gv_Ev,
    /* 0x4e */ iemOp_cmovle_Gv_Ev,
    /* 0x4f */ iemOp_cmovnle_Gv_Ev,
    /* 0x50 */ iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd,
    /* 0x51 */ iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd,
    /* 0x52 */ iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss,
    /* 0x53 */ iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss,
    /* 0x54 */ iemOp_andps_Vps_Wps__andpd_Wpd_Vpd,
    /* 0x55 */ iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd,
    /* 0x56 */ iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd,
    /* 0x57 */ iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd,
    /* 0x58 */ iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd,
    /* 0x59 */ iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd,
    /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd,
    /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps,
    /* 0x5c */ iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd,
    /* 0x5d */ iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd,
    /* 0x5e */ iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd,
    /* 0x5f */ iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd,
    /* 0x60 */ iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq,
    /* 0x61 */ iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq,
    /* 0x62 */ iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq,
    /* 0x63 */ iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq,
    /* 0x64 */ iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq,
    /* 0x65 */ iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq,
    /* 0x66 */ iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq,
    /* 0x67 */ iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq,
    /* 0x68 */ iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq,
    /* 0x69 */ iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq,
    /* 0x6a */ iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq,
    /* 0x6b */ iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq,
    /* 0x6c */ iemOp_punpcklqdq_Vdq_Wdq,
    /* 0x6d */ iemOp_punpckhqdq_Vdq_Wdq,
    /* 0x6e */ iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey,
    /* 0x6f */ iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq,
    /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib,
    /* 0x71 */ iemOp_Grp12,
    /* 0x72 */ iemOp_Grp13,
    /* 0x73 */ iemOp_Grp14,
    /* 0x74 */ iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq,
    /* 0x75 */ iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq,
    /* 0x76 */ iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq,
    /* 0x77 */ iemOp_emms,
    /* 0x78 */ iemOp_vmread_AmdGrp17,
    /* 0x79 */ iemOp_vmwrite,
    /* 0x7a */ iemOp_Invalid,
    /* 0x7b */ iemOp_Invalid,
    /* 0x7c */ iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps,
    /* 0x7d */ iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps,
    /* 0x7e */ iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq,
    /* 0x7f */ iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq,
    /* 0x80 */ iemOp_jo_Jv,
    /* 0x81 */ iemOp_jno_Jv,
    /* 0x82 */ iemOp_jc_Jv,
    /* 0x83 */ iemOp_jnc_Jv,
    /* 0x84 */ iemOp_je_Jv,
    /* 0x85 */ iemOp_jne_Jv,
    /* 0x86 */ iemOp_jbe_Jv,
    /* 0x87 */ iemOp_jnbe_Jv,
    /* 0x88 */ iemOp_js_Jv,
    /* 0x89 */ iemOp_jns_Jv,
    /* 0x8a */ iemOp_jp_Jv,
    /* 0x8b */ iemOp_jnp_Jv,
    /* 0x8c */ iemOp_jl_Jv,
    /* 0x8d */ iemOp_jnl_Jv,
    /* 0x8e */ iemOp_jle_Jv,
    /* 0x8f */ iemOp_jnle_Jv,
    /* 0x90 */ iemOp_seto_Eb,
    /* 0x91 */ iemOp_setno_Eb,
    /* 0x92 */ iemOp_setc_Eb,
    /* 0x93 */ iemOp_setnc_Eb,
    /* 0x94 */ iemOp_sete_Eb,
    /* 0x95 */ iemOp_setne_Eb,
    /* 0x96 */ iemOp_setbe_Eb,
    /* 0x97 */ iemOp_setnbe_Eb,
    /* 0x98 */ iemOp_sets_Eb,
    /* 0x99 */ iemOp_setns_Eb,
    /* 0x9a */ iemOp_setp_Eb,
    /* 0x9b */ iemOp_setnp_Eb,
    /* 0x9c */ iemOp_setl_Eb,
    /* 0x9d */ iemOp_setnl_Eb,
    /* 0x9e */ iemOp_setle_Eb,
    /* 0x9f */ iemOp_setnle_Eb,
    /* 0xa0 */ iemOp_push_fs,
    /* 0xa1 */ iemOp_pop_fs,
    /* 0xa2 */ iemOp_cpuid,
    /* 0xa3 */ iemOp_bt_Ev_Gv,
    /* 0xa4 */ iemOp_shld_Ev_Gv_Ib,
    /* 0xa5 */ iemOp_shld_Ev_Gv_CL,
    /* 0xa6 */ iemOp_Invalid,
    /* 0xa7 */ iemOp_Invalid,
    /* 0xa8 */ iemOp_push_gs,
    /* 0xa9 */ iemOp_pop_gs,
    /* 0xaa */ iemOp_rsm,
    /* 0xab */ iemOp_bts_Ev_Gv,
    /* 0xac */ iemOp_shrd_Ev_Gv_Ib,
    /* 0xad */ iemOp_shrd_Ev_Gv_CL,
    /* 0xae */ iemOp_Grp15,
    /* 0xaf */ iemOp_imul_Gv_Ev,
    /* 0xb0 */ iemOp_cmpxchg_Eb_Gb,
    /* 0xb1 */ iemOp_cmpxchg_Ev_Gv,
    /* 0xb2 */ iemOp_lss_Gv_Mp,
    /* 0xb3 */ iemOp_btr_Ev_Gv,
    /* 0xb4 */ iemOp_lfs_Gv_Mp,
    /* 0xb5 */ iemOp_lgs_Gv_Mp,
    /* 0xb6 */ iemOp_movzx_Gv_Eb,
    /* 0xb7 */ iemOp_movzx_Gv_Ew,
    /* 0xb8 */ iemOp_popcnt_Gv_Ev_jmpe,
    /* 0xb9 */ iemOp_Grp10,
    /* 0xba */ iemOp_Grp8,
    /* 0xbb */ iemOp_btc_Ev_Gv,
    /* 0xbc */ iemOp_bsf_Gv_Ev,
    /* 0xbd */ iemOp_bsr_Gv_Ev,
    /* 0xbe */ iemOp_movsx_Gv_Eb,
    /* 0xbf */ iemOp_movsx_Gv_Ew,
    /* 0xc0 */ iemOp_xadd_Eb_Gb,
    /* 0xc1 */ iemOp_xadd_Ev_Gv,
    /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib,
    /* 0xc3 */ iemOp_movnti_My_Gy,
    /* 0xc4 */ iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib,
    /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib,
    /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib,
    /* 0xc7 */ iemOp_Grp9,
    /* 0xc8 */ iemOp_bswap_rAX_r8,
    /* 0xc9 */ iemOp_bswap_rCX_r9,
    /* 0xca */ iemOp_bswap_rDX_r10,
    /* 0xcb */ iemOp_bswap_rBX_r11,
    /* 0xcc */ iemOp_bswap_rSP_r12,
    /* 0xcd */ iemOp_bswap_rBP_r13,
    /* 0xce */ iemOp_bswap_rSI_r14,
    /* 0xcf */ iemOp_bswap_rDI_r15,
    /* 0xd0 */ iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps,
    /* 0xd1 */ iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq,
    /* 0xd2 */ iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq,
    /* 0xd3 */ iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq,
    /* 0xd4 */ iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq,
    /* 0xd5 */ iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq,
    /* 0xd6 */ iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq,
    /* 0xd7 */ iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq,
    /* 0xd8 */ iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq,
    /* 0xd9 */ iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq,
    /* 0xda */ iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq,
    /* 0xdb */ iemOp_pand_Pq_Qq__pand_Vdq_Wdq,
    /* 0xdc */ iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq,
    /* 0xdd */ iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq,
    /* 0xde */ iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq,
    /* 0xdf */ iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq,
    /* 0xe0 */ iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq,
    /* 0xe1 */ iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq,
    /* 0xe2 */ iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq,
    /* 0xe3 */ iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq,
    /* 0xe4 */ iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq,
    /* 0xe5 */ iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq,
    /* 0xe6 */ iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd,
    /* 0xe7 */ iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq,
    /* 0xe8 */ iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq,
    /* 0xe9 */ iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq,
    /* 0xea */ iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq,
    /* 0xeb */ iemOp_por_Pq_Qq__por_Vdq_Wdq,
    /* 0xec */ iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq,
    /* 0xed */ iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq,
    /* 0xee */ iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq,
    /* 0xef */ iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq,
    /* 0xf0 */ iemOp_lddqu_Vdq_Mdq,
    /* 0xf1 */ iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq,
    /* 0xf2 */ iemOp_psld_Pq_Qq__pslld_Vdq_Wdq,
    /* 0xf3 */ iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq,
    /* 0xf4 */ iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq,
    /* 0xf5 */ iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq,
    /* 0xf6 */ iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq,
    /* 0xf7 */ iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq,
    /* 0xf8 */ iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq,
    /* 0xf9 */ iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq,
    /* 0xfa */ iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq,
    /* 0xfb */ iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq,
    /* 0xfc */ iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq,
    /* 0xfd */ iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq,
    /* 0xfe */ iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq,
    /* 0xff */ iemOp_Invalid
};
5636
5637/** @} */
5638
5639
5640/** @name One byte opcodes.
5641 *
5642 * @{
5643 */
5644
/* Opcodes 0x00..0x07: the ADD family plus push/pop ES.  Each variant simply
   forwards to the shared binary-operator decoder with the ADD worker table. */

/** Opcode 0x00. */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    IEMOP_MNEMONIC("add Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
}


/** Opcode 0x01. */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    IEMOP_MNEMONIC("add Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
}


/** Opcode 0x02. */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    IEMOP_MNEMONIC("add Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
}


/** Opcode 0x03. */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    IEMOP_MNEMONIC("add Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
}


/** Opcode 0x04. */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    IEMOP_MNEMONIC("add al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
}


/** Opcode 0x05. */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    IEMOP_MNEMONIC("add rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
}


/** Opcode 0x06. */
FNIEMOP_DEF(iemOp_push_ES)
{
    IEMOP_MNEMONIC("push es");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}


/** Opcode 0x07. */
FNIEMOP_DEF(iemOp_pop_ES)
{
    IEMOP_MNEMONIC("pop es");
    IEMOP_HLP_NO_64BIT();       /* pop es is invalid in 64-bit mode. */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pIemCpu->enmEffOpSize);
}
5709
5710
/* Opcodes 0x08..0x0f: the OR family, push CS and the two-byte escape.
   AF is declared undefined for the verification mode since the logical
   instructions leave it in an undefined state. */

/** Opcode 0x08. */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    IEMOP_MNEMONIC("or  Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
}


/** Opcode 0x09. */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    IEMOP_MNEMONIC("or  Ev,Gv ");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
}


/** Opcode 0x0a. */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    IEMOP_MNEMONIC("or  Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
}


/** Opcode 0x0b. */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    IEMOP_MNEMONIC("or  Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
}


/** Opcode 0x0c. */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    IEMOP_MNEMONIC("or  al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
}


/** Opcode 0x0d. */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    IEMOP_MNEMONIC("or  rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
}


/** Opcode 0x0e. */
FNIEMOP_DEF(iemOp_push_CS)
{
    IEMOP_MNEMONIC("push cs");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}


/** Opcode 0x0f. */
FNIEMOP_DEF(iemOp_2byteEscape)
{
    /* Fetch the second opcode byte and dispatch via the two byte map. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnTwoByteMap[b]);
}
5779
/* Opcodes 0x10..0x17: the ADC family plus push/pop SS. */

/** Opcode 0x10. */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    IEMOP_MNEMONIC("adc Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
}


/** Opcode 0x11. */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    IEMOP_MNEMONIC("adc Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
}


/** Opcode 0x12. */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    IEMOP_MNEMONIC("adc Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
}


/** Opcode 0x13. */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    IEMOP_MNEMONIC("adc Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
}


/** Opcode 0x14. */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    IEMOP_MNEMONIC("adc al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
}


/** Opcode 0x15. */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    IEMOP_MNEMONIC("adc rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
}


/** Opcode 0x16. */
FNIEMOP_DEF(iemOp_push_SS)
{
    IEMOP_MNEMONIC("push ss");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}


/** Opcode 0x17. */
FNIEMOP_DEF(iemOp_pop_SS)
{
    IEMOP_MNEMONIC("pop ss"); /** @todo implies instruction fusing? */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();       /* pop ss is invalid in 64-bit mode. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pIemCpu->enmEffOpSize);
}
5844
5845
/* Opcodes 0x18..0x1f: the SBB family plus push/pop DS. */

/** Opcode 0x18. */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    IEMOP_MNEMONIC("sbb Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
}


/** Opcode 0x19. */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    IEMOP_MNEMONIC("sbb Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
}


/** Opcode 0x1a. */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    IEMOP_MNEMONIC("sbb Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
}


/** Opcode 0x1b. */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    IEMOP_MNEMONIC("sbb Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
}


/** Opcode 0x1c. */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    IEMOP_MNEMONIC("sbb al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
}


/** Opcode 0x1d. */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    IEMOP_MNEMONIC("sbb rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
}


/** Opcode 0x1e. */
FNIEMOP_DEF(iemOp_push_DS)
{
    IEMOP_MNEMONIC("push ds");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}


/** Opcode 0x1f. */
FNIEMOP_DEF(iemOp_pop_DS)
{
    IEMOP_MNEMONIC("pop ds");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();       /* pop ds is invalid in 64-bit mode. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pIemCpu->enmEffOpSize);
}
5910
5911
/** Opcode 0x20 - AND Eb,Gb.
 * All six AND forms declare AF as undefined output for the verification mode. */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    IEMOP_MNEMONIC("and Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
}


/** Opcode 0x21 - AND Ev,Gv. */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    IEMOP_MNEMONIC("and Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
}


/** Opcode 0x22 - AND Gb,Eb. */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    IEMOP_MNEMONIC("and Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
}


/** Opcode 0x23 - AND Gv,Ev. */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    IEMOP_MNEMONIC("and Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
}


/** Opcode 0x24 - AND AL,Ib. */
FNIEMOP_DEF(iemOp_and_Al_Ib)
{
    IEMOP_MNEMONIC("and al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
}


/** Opcode 0x25 - AND rAX,Iz. */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    IEMOP_MNEMONIC("and rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
}
5964
5965
/** Opcode 0x26 - ES segment override prefix.
 * Records the prefix and effective segment, then continues decoding with the
 * next opcode byte via the one-byte dispatch table. */
FNIEMOP_DEF(iemOp_seg_ES)
{
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_ES;
    pIemCpu->iEffSeg = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
5975
5976
/** Opcode 0x27 - DAA; not implemented yet (stub). */
FNIEMOP_STUB(iemOp_daa);
5979
5980
/** Opcode 0x28 - SUB Eb,Gb. */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    IEMOP_MNEMONIC("sub Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
}


/** Opcode 0x29 - SUB Ev,Gv. */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    IEMOP_MNEMONIC("sub Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
}


/** Opcode 0x2a - SUB Gb,Eb. */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    IEMOP_MNEMONIC("sub Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
}


/** Opcode 0x2b - SUB Gv,Ev. */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    IEMOP_MNEMONIC("sub Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
}


/** Opcode 0x2c - SUB AL,Ib. */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    IEMOP_MNEMONIC("sub al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
}


/** Opcode 0x2d - SUB rAX,Iz. */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    IEMOP_MNEMONIC("sub rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
}
6027
6028
/** Opcode 0x2e - CS segment override prefix.
 * Records the prefix and effective segment, then continues decoding with the
 * next opcode byte. */
FNIEMOP_DEF(iemOp_seg_CS)
{
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_CS;
    pIemCpu->iEffSeg = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
6038
6039
/** Opcode 0x2f - DAS; not implemented yet (stub). */
FNIEMOP_STUB(iemOp_das);
6042
6043
/** Opcode 0x30 - XOR Eb,Gb.
 * All six XOR forms declare AF as undefined output for the verification mode. */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    IEMOP_MNEMONIC("xor Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
}


/** Opcode 0x31 - XOR Ev,Gv. */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    IEMOP_MNEMONIC("xor Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
}


/** Opcode 0x32 - XOR Gb,Eb. */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    IEMOP_MNEMONIC("xor Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
}


/** Opcode 0x33 - XOR Gv,Ev. */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    IEMOP_MNEMONIC("xor Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
}


/** Opcode 0x34 - XOR AL,Ib. */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    IEMOP_MNEMONIC("xor al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
}


/** Opcode 0x35 - XOR rAX,Iz. */
FNIEMOP_DEF(iemOp_xor_eAX_Iz)
{
    IEMOP_MNEMONIC("xor rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
}
6096
6097
/** Opcode 0x36 - SS segment override prefix.
 * Records the prefix and effective segment, then continues decoding with the
 * next opcode byte. */
FNIEMOP_DEF(iemOp_seg_SS)
{
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_SS;
    pIemCpu->iEffSeg = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
6107
6108
/** Opcode 0x37 - AAA; not implemented yet (stub). */
FNIEMOP_STUB(iemOp_aaa);
6111
6112
/** Opcode 0x38 - CMP Eb,Gb.
 * NOTE(review): only the Eb,Gb and Ev,Gv forms reject LOCK here; the other
 * four forms do not call IEMOP_HLP_NO_LOCK_PREFIX - confirm whether the
 * shared helpers cover that case. */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    IEMOP_MNEMONIC("cmp Eb,Gb");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
}


/** Opcode 0x39 - CMP Ev,Gv. */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    IEMOP_MNEMONIC("cmp Ev,Gv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
}


/** Opcode 0x3a - CMP Gb,Eb. */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    IEMOP_MNEMONIC("cmp Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
}


/** Opcode 0x3b - CMP Gv,Ev. */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmp Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
}


/** Opcode 0x3c - CMP AL,Ib. */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    IEMOP_MNEMONIC("cmp al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
}


/** Opcode 0x3d - CMP rAX,Iz. */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    IEMOP_MNEMONIC("cmp rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
}
6161
6162
/** Opcode 0x3e - DS segment override prefix.
 * Records the prefix and effective segment, then continues decoding with the
 * next opcode byte. */
FNIEMOP_DEF(iemOp_seg_DS)
{
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_DS;
    pIemCpu->iEffSeg = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
6172
6173
/** Opcode 0x3f - AAS; not implemented yet (stub). */
FNIEMOP_STUB(iemOp_aas);
6176
/**
 * Common 'inc/dec/not/neg register' helper.
 *
 * Applies a unary operation to a general register for the current effective
 * operand size, updating EFLAGS via the worker.
 *
 * @param   pImpl   Worker function table with per-size implementations
 *                  (pfnNormalU16/U32/U64).
 * @param   iReg    The general register index (already REX-extended by the
 *                  caller where applicable).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    /* Not reachable with a valid enmEffOpSize; keeps the compiler happy. */
    return VINF_SUCCESS;
}
6220
6221
/*
 * Opcodes 0x40-0x47: INC reg in 16/32-bit mode; in 64-bit mode these bytes
 * are the REX prefixes (the low nibble maps to the REX.R/X/B bits).  The
 * prefix handlers record the flag bits and the 1<<3 register-extension
 * values, then restart decoding with the next opcode byte.
 */

/** Opcode 0x40 - INC eAX / REX prefix. */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
}


/** Opcode 0x41 - INC eCX / REX.B prefix. */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pIemCpu->uRexB = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
}


/** Opcode 0x42 - INC eDX / REX.X prefix. */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
}



/** Opcode 0x43 - INC eBX / REX.XB prefix. */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pIemCpu->uRexB = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
}


/** Opcode 0x44 - INC eSP / REX.R prefix. */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pIemCpu->uRexReg = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
}


/** Opcode 0x45 - INC eBP / REX.RB prefix. */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexB = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
}


/** Opcode 0x46 - INC eSI / REX.RX prefix. */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
}


/** Opcode 0x47 - INC eDI / REX.RXB prefix. */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexB = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
}
6385
6386
/*
 * Opcodes 0x48-0x4f: DEC reg in 16/32-bit mode; in 64-bit mode these are the
 * REX.W prefixes (plus R/X/B bits).  Unlike 0x40-0x47 these also set
 * IEM_OP_PRF_SIZE_REX_W and recalculate the effective operand size before
 * restarting decode with the next opcode byte.
 */

/** Opcode 0x48 - DEC eAX / REX.W prefix. */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
}


/** Opcode 0x49 - DEC eCX / REX.WB prefix. */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexB = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
}


/** Opcode 0x4a - DEC eDX / REX.WX prefix. */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
}


/** Opcode 0x4b - DEC eBX / REX.WXB prefix. */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexB = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
}


/** Opcode 0x4c - DEC eSP / REX.WR prefix. */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
}


/** Opcode 0x4d - DEC eBP / REX.WRB prefix. */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexB = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
}


/** Opcode 0x4e - DEC eSI / REX.WRX prefix. */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
}


/** Opcode 0x4f - DEC eDI / REX.WRXB prefix. */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexB = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
}
6557
6558
/**
 * Common 'push register' helper.
 *
 * In 64-bit mode the register index is REX.B extended, the default operand
 * size becomes 64-bit, and an operand-size prefix selects 16-bit (there is
 * no 32-bit push in long mode).
 *
 * @param   iReg    The general register index (before REX.B extension).
 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pIemCpu->uRexB;
        pIemCpu->enmDefOpSize = IEMMODE_64BIT;
        pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
6604
6605
/** Opcode 0x50 - PUSH rAX.  Opcodes 0x50-0x57 all defer to
 *  iemOpCommonPushGReg, which applies REX.B extension in 64-bit mode. */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC("push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}


/** Opcode 0x51 - PUSH rCX. */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC("push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}


/** Opcode 0x52 - PUSH rDX. */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC("push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}


/** Opcode 0x53 - PUSH rBX. */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC("push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}


/** Opcode 0x54 - PUSH rSP. */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC("push rSP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
}


/** Opcode 0x55 - PUSH rBP. */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC("push rBP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}


/** Opcode 0x56 - PUSH rSI. */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC("push rSI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}


/** Opcode 0x57 - PUSH rDI. */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC("push rDI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
6668
6669
/**
 * Common 'pop register' helper.
 *
 * Mirrors iemOpCommonPushGReg: in 64-bit mode the register index is REX.B
 * extended, the default operand size becomes 64-bit, and an operand-size
 * prefix selects 16-bit.
 *
 * NOTE(review): IEM_MC_LOCAL(uint16_t, *pu16Dst) appears to rely on the
 * macro pasting type and name so the '*' makes the local a pointer -
 * confirm against the IEM_MC_LOCAL definition before touching these lines.
 *
 * @param   iReg    The general register index (before REX.B extension).
 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pIemCpu->uRexB;
        pIemCpu->enmDefOpSize = IEMMODE_64BIT;
        pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

/** @todo How does this code handle iReg==X86_GREG_xSP. How does a real CPU
 *        handle it, for that matter (Intel pseudo code hints that the popped
 *        value is incremented by the stack item size.)  Test it, both encodings
 *        and all three register sizes. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, *pu16Dst);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_POP_U16(pu16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, *pu32Dst);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_POP_U32(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, *pu64Dst);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_POP_U64(pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
6719
6720
/** Opcode 0x58 - POP rAX.  Opcodes 0x58-0x5f all defer to
 *  iemOpCommonPopGReg, which applies REX.B extension in 64-bit mode. */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC("pop rAX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}


/** Opcode 0x59 - POP rCX. */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC("pop rCX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}


/** Opcode 0x5a - POP rDX. */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC("pop rDX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}


/** Opcode 0x5b - POP rBX. */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC("pop rBX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}


/** Opcode 0x5c - POP rSP (see the xSP todo in iemOpCommonPopGReg). */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC("pop rSP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
}


/** Opcode 0x5d - POP rBP. */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC("pop rBP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}


/** Opcode 0x5e - POP rSI. */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC("pop rSI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}


/** Opcode 0x5f - POP rDI. */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC("pop rDI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
6783
6784
/** Opcode 0x60 - PUSHA/PUSHAD.
 * Rejected in 64-bit mode; dispatches on effective operand size to the
 * 16-bit or 32-bit C implementation. */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC("pusha");
    IEMOP_HLP_NO_64BIT();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
    Assert(pIemCpu->enmEffOpSize == IEMMODE_32BIT); /* only two sizes possible outside 64-bit mode */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
}
6795
6796
/** Opcode 0x61 - POPA/POPAD.
 * Rejected in 64-bit mode; dispatches on effective operand size to the
 * 16-bit or 32-bit C implementation. */
FNIEMOP_DEF(iemOp_popa)
{
    IEMOP_MNEMONIC("popa");
    IEMOP_HLP_NO_64BIT();
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
    Assert(pIemCpu->enmEffOpSize == IEMMODE_32BIT); /* only two sizes possible outside 64-bit mode */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
}
6807
6808
/** Opcode 0x62 - BOUND Gv,Ma; not implemented yet (stub). */
FNIEMOP_STUB(iemOp_bound_Gv_Ma);
/** Opcode 0x63 - ARPL Ew,Gw; not implemented yet (stub). */
FNIEMOP_STUB(iemOp_arpl_Ew_Gw);
6813
6814
/** Opcode 0x64 - FS segment override prefix.
 * Records the prefix and effective segment, then continues decoding with the
 * next opcode byte. */
FNIEMOP_DEF(iemOp_seg_FS)
{
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_FS;
    pIemCpu->iEffSeg = X86_SREG_FS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x65 - GS segment override prefix. */
FNIEMOP_DEF(iemOp_seg_GS)
{
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_GS;
    pIemCpu->iEffSeg = X86_SREG_GS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
6835
6836
/** Opcode 0x66 - operand-size override prefix.
 * Records the prefix, recalculates the effective operand size, then
 * continues decoding with the next opcode byte. */
FNIEMOP_DEF(iemOp_op_size)
{
    pIemCpu->fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pIemCpu);

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
6846
6847
/** Opcode 0x67 - address-size override prefix.
 * Toggles the effective address mode relative to the default; in 64-bit
 * mode the prefix selects 32-bit addressing. */
FNIEMOP_DEF(iemOp_addr_size)
{
    pIemCpu->fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    switch (pIemCpu->enmDefAddrMode)
    {
        case IEMMODE_16BIT: pIemCpu->enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pIemCpu->enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pIemCpu->enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
6863
6864
/** Opcode 0x68 - PUSH Iz (push immediate word/dword).
 * The immediate width follows the effective operand size; the 64-bit case
 * fetches a 32-bit immediate sign-extended to 64 bits
 * (IEM_OPCODE_GET_NEXT_S32_SX_U64). */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC("push Iz");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6908
6909
/** Opcode 0x69 - IMUL Gv,Ev,Iz (three-operand signed multiply).
 *
 * Handles all three operand sizes, each with register and memory source
 * forms.  The product is computed in a local temporary and then written to
 * the Gv register.  SF/ZF/AF/PF are declared undefined for verification.
 * In the memory forms the effective address is decoded before the
 * immediate, matching the instruction byte order (ModRM/SIB/disp precede
 * the immediate). */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC("imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* Iz is 32-bit, sign-extended */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }
    }
    AssertFailedReturn(VERR_INTERNAL_ERROR_3);
}
7068
7069
/** Opcode 0x6a - PUSH Ib (push sign-extended immediate byte).
 * The int8_t immediate is sign-extended by the implicit conversion to the
 * push width selected by the effective operand size. */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC("push Ib");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0,0);
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_PUSH_U16(i8Imm);
            break;
        case IEMMODE_32BIT:
            IEM_MC_PUSH_U32(i8Imm);
            break;
        case IEMMODE_64BIT:
            IEM_MC_PUSH_U64(i8Imm);
            break;
    }
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7095
7096
/**
 * Opcode 0x6b - IMUL Gv,Ev,Ib.
 *
 * Three operand signed multiply with a sign-extended byte immediate:
 * Gv = Ev * (sign-extended Ib).  Implemented per effective operand size,
 * with separate register and memory operand paths.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC("imul Gv,Ev,Ib"); /* Gv = Ev * Ib (sign-extended); */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* SF, ZF, AF and PF are left undefined by IMUL; tell the verifier so. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                /* (int8_t) cast sign-extends the byte immediate to 16 bits. */
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);

                /* Multiply in a local copy of Ev, then store the result into Gv. */
                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG(uint16_t,        u16Src,            1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                /* The immediate byte follows the ModR/M bytes; fetched sign-extended. */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG(uint32_t,        u32Src,            1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG(uint64_t,        u64Src,            1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_INTERNAL_ERROR_3);
}
7249
7250
/** Opcode 0x6c - INSB Yb,DX (byte string input, with or without REP). */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Both F2 and F3 prefixes are handled by the REP variant here. */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep ins Yb,DX");
        /* Deferred to a C implementation specialized on the effective address size. */
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rep_ins_op8_addr16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rep_ins_op8_addr32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rep_ins_op8_addr64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* the macro supplies the default: label */
        }
    }
    else
    {
        IEMOP_MNEMONIC("ins Yb,DX");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_ins_op8_addr16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_ins_op8_addr32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_ins_op8_addr64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7278
7279
/**
 * Opcode 0x6d - INSW/INSD Yv,DX (word/dword string input, with or without REP).
 *
 * Dispatches on operand size and then address size; the 64-bit operand size
 * is handled by the 32-bit workers (shared case labels below).
 */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Both F2 and F3 prefixes are handled by the REP variant here. */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC("rep ins Yv,DX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rep_ins_op16_addr16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rep_ins_op16_addr32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rep_ins_op16_addr64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* the macro supplies the default: label */
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit workers. */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rep_ins_op32_addr16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rep_ins_op32_addr32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rep_ins_op32_addr64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* default of the operand size switch */
        }
    }
    else
    {
        IEMOP_MNEMONIC("ins Yv,DX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_ins_op16_addr16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_ins_op16_addr32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_ins_op16_addr64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit workers. */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_ins_op32_addr16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_ins_op32_addr32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_ins_op32_addr64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7339
7340
/** Opcode 0x6e - OUTSB DX,Yb (byte string output, with or without REP). */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Both F2 and F3 prefixes are handled by the REP variant here. */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep out DX,Yb");
        /* OUTS reads from memory, so the effective segment (overridable) is passed along. */
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_outs_op8_addr16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_outs_op8_addr32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_outs_op8_addr64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* the macro supplies the default: label */
        }
    }
    else
    {
        IEMOP_MNEMONIC("out DX,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_outs_op8_addr16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_outs_op8_addr32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_outs_op8_addr64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7368
7369
/**
 * Opcode 0x6f - OUTSW/OUTSD DX,Yv (word/dword string output, with or without REP).
 *
 * Dispatches on operand size and then address size; the 64-bit operand size
 * is handled by the 32-bit workers (shared case labels below).  OUTS reads
 * from memory, so the effective segment (overridable) is passed along.
 */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Both F2 and F3 prefixes are handled by the REP variant here. */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC("rep outs DX,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_outs_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_outs_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_outs_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* the macro supplies the default: label */
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit workers. */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_outs_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_outs_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_outs_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* default of the operand size switch */
        }
    }
    else
    {
        IEMOP_MNEMONIC("outs DX,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_outs_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_outs_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_outs_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit workers. */
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_outs_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_outs_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_outs_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7429
7430
/** Opcode 0x70 - JO Jb: short jump taken when OF is set. */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC("jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near jumps default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7448
7449
/** Opcode 0x71 - JNO Jb: short jump taken when OF is clear (inverted branch bodies). */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC("jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near jumps default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();   /* OF set: fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm); /* OF clear: take the jump */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7467
/** Opcode 0x72 - JC/JB/JNAE Jb: short jump taken when CF is set. */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC("jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near jumps default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7485
7486
/** Opcode 0x73 - JNC/JNB/JAE Jb: short jump taken when CF is clear (inverted branch bodies). */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC("jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near jumps default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP();   /* CF set: fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm); /* CF clear: take the jump */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7504
7505
/** Opcode 0x74 - JE/JZ Jb: short jump taken when ZF is set. */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC("je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near jumps default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7523
7524
/** Opcode 0x75 - JNE/JNZ Jb: short jump taken when ZF is clear (inverted branch bodies). */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC("jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near jumps default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();   /* ZF set: fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm); /* ZF clear: take the jump */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7542
7543
/** Opcode 0x76 - JBE/JNA Jb: short jump taken when CF or ZF is set. */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC("jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near jumps default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7561
7562
/** Opcode 0x77 - JNBE/JA Jb: short jump taken when both CF and ZF are clear (inverted branch bodies). */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC("jnbe/ja Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near jumps default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();   /* CF or ZF set: fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm); /* both clear: take the jump */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7580
7581
/** Opcode 0x78 - JS Jb: short jump taken when SF is set. */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC("js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near jumps default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7599
7600
/** Opcode 0x79 - JNS Jb: short jump taken when SF is clear (inverted branch bodies). */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC("jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near jumps default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP();   /* SF set: fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm); /* SF clear: take the jump */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7618
7619
/** Opcode 0x7a - JP/JPE Jb: short jump taken when PF is set. */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC("jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near jumps default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7637
7638
/** Opcode 0x7b - JNP/JPO Jb: short jump taken when PF is clear (inverted branch bodies). */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC("jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near jumps default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP();   /* PF set: fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm); /* PF clear: take the jump */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7656
7657
/** Opcode 0x7c - JL/JNGE Jb: short jump taken when SF != OF. */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC("jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near jumps default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7675
7676
/** Opcode 0x7d - JNL/JGE Jb: short jump taken when SF == OF (inverted branch bodies). */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC("jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near jumps default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();   /* SF != OF: fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm); /* SF == OF: take the jump */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7694
7695
/** Opcode 0x7e - JLE/JNG Jb: short jump taken when ZF is set or SF != OF. */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC("jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near jumps default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7713
7714
/** Opcode 0x7f - JNLE/JG Jb: short jump taken when ZF is clear and SF == OF (inverted branch bodies). */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC("jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near jumps default to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();   /* ZF set or SF != OF: fall through */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm); /* ZF clear and SF == OF: take the jump */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7732
7733
/**
 * Opcode 0x80 - Group 1: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP Eb,Ib.
 *
 * The reg field of ModR/M selects the operation via the g_apIemImplGrp1
 * table; the mnemonic string below is indexed the same way (4 bytes per
 * mnemonic, NUL padded).
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Eb,Ib");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK is never valid with a register destination */
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1);
        IEM_MC_ARG(uint32_t *,      pEFlags,                2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        uint32_t fAccess;
        if (pImpl->pfnLockedU8)
            fAccess = IEM_ACCESS_DATA_RW;
        else
        {   /* CMP - no locked worker, destination is only read, LOCK prefix invalid. */
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        /* The immediate comes after the ModR/M bytes (incl. displacement). */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1);

        IEM_MC_MEM_MAP(pu8Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* Pick the locked worker when a LOCK prefix is present. */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7792
7793
/**
 * Opcode 0x81 - Group 1: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP Ev,Iz.
 *
 * The reg field of ModR/M selects the operation via the g_apIemImplGrp1
 * table.  The immediate is operand sized, except in 64-bit mode where it
 * is a 32-bit value sign-extended to 64 bits.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Ev,Iz");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK is never valid with a register destination */
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ u16Imm,   1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU16)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                {   /* CMP, TEST - no locked worker, read-only access, LOCK prefix invalid. */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG(uint16_t,        u16Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                /* The immediate comes after the ModR/M bytes (incl. displacement). */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                /* Pick the locked worker when a LOCK prefix is present. */
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ u32Imm,   1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU32)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                {   /* CMP, TEST - no locked worker, read-only access, LOCK prefix invalid. */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG(uint32_t,        u32Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                /* Iz in 64-bit mode: 32-bit immediate sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ u64Imm,   1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU64)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                {   /* CMP - no locked worker, read-only access, LOCK prefix invalid. */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG(uint64_t,        u64Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }
    }
    return VINF_SUCCESS;
}
7967
7968
/** Opcode 0x82 - alias of opcode 0x80 (Group 1 Eb,Ib); invalid in 64-bit mode. */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80); /* same behavior as 0x80 outside 64-bit mode */
}
7975
7976
/**
 * Opcode 0x83 - Group 1: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP Ev,Ib.
 *
 * The reg field of ModR/M selects the operation via the g_apIemImplGrp1
 * table.  The byte immediate is sign-extended to the effective operand
 * size (the (int8_t) casts below).
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Ev,Ib");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register target
         */
        IEMOP_HLP_NO_LOCK_PREFIX(); /* LOCK is never valid with a register destination */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    else
    {
        /*
         * Memory target.
         */
        /* Note: the 16-bit locked worker is checked as the indicator for all
           operand sizes (see the pfnLockedU32/U64 uses below). */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else
        {   /* CMP - no locked worker, read-only access, LOCK prefix invalid. */
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG(uint16_t,        u16Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                /* The immediate comes after the ModR/M bytes (incl. displacement). */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                /* Pick the locked worker when a LOCK prefix is present. */
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG(uint32_t,        u32Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG(uint64_t,        u64Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    return VINF_SUCCESS;
}
8136
8137
/** Opcode 0x84 - TEST Eb,Gb; shares the byte r/m,r8 binary-op worker. */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC("test Eb,Gb");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after TEST */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
}
8146
8147
/** Opcode 0x85 - TEST Ev,Gv; shares the operand-sized r/m,r binary-op worker. */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC("test Ev,Gv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after TEST */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
}
8156
8157
/**
 * Opcode 0x86 - XCHG Eb,Gb.
 *
 * Register-register form swaps via two locals; the memory form maps the
 * memory byte read/write and calls the assembly xchg worker.
 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC("xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        /* Fetch both, then store cross-wise. */
        IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
/** @todo the register must be committed separately! */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *,  pu8Mem, 0);
        IEM_MC_ARG(uint8_t *,  pu8Reg, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8205
8206
/**
 * Opcode 0x87 - XCHG Ev,Gv.
 *
 * Exchanges a general register with another register or with memory, sized
 * by the effective operand size (16/32/64 bits).  The memory forms map the
 * location read/write and call the assembly xchg helpers, which supply the
 * implicit-lock semantics of memory XCHG.
 */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC("xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        /* Register form: fetch both registers and store them swapped. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
/** @todo the register must be committed separately! */
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint16_t *, pu16Mem, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint32_t *, pu32Mem, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pu64Mem, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8327
8328
/**
 * Opcode 0x88 - MOV Eb,Gb.
 *
 * Moves a byte register into another byte register or into memory.
 */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC("mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register-to-register form. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
8367
8368
/**
 * Opcode 0x89 - MOV Ev,Gv.
 *
 * Moves a general register into another register or into memory, sized by
 * the effective operand size (16/32/64 bits).
 */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC("mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
8455
8456
/**
 * Opcode 0x8a - MOV Gb,Eb.
 *
 * Moves a byte from a register or memory into a byte register.
 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC("mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register-to-register form. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_FETCH_MEM_U8(u8Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8493
8494
/**
 * Opcode 0x8b - MOV Gv,Ev.
 *
 * Moves a register or memory operand into a general register, sized by the
 * effective operand size (16/32/64 bits).
 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC("mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
8581
8582
/**
 * Opcode 0x8c - MOV Ev,Sw.
 *
 * Stores a segment register into a general register (operand sized, upper
 * bits cleared) or into memory (always a 16-bit store).  Raises \#UD when
 * the reg field does not name a valid segment register; REX.R is ignored.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC("mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if ( iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're saving the register to memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8655
8656
8657
8658
8659/** Opcode 0x8d. */
8660FNIEMOP_DEF(iemOp_lea_Gv_M)
8661{
8662 IEMOP_MNEMONIC("lea Gv,M");
8663 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8664 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
8665 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8666 return IEMOP_RAISE_INVALID_LOCK_PREFIX(); /* no register form */
8667
8668 switch (pIemCpu->enmEffOpSize)
8669 {
8670 case IEMMODE_16BIT:
8671 IEM_MC_BEGIN(0, 2);
8672 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8673 IEM_MC_LOCAL(uint16_t, u16Cast);
8674 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
8675 IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
8676 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Cast);
8677 IEM_MC_ADVANCE_RIP();
8678 IEM_MC_END();
8679 return VINF_SUCCESS;
8680
8681 case IEMMODE_32BIT:
8682 IEM_MC_BEGIN(0, 2);
8683 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8684 IEM_MC_LOCAL(uint32_t, u32Cast);
8685 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
8686 IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
8687 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Cast);
8688 IEM_MC_ADVANCE_RIP();
8689 IEM_MC_END();
8690 return VINF_SUCCESS;
8691
8692 case IEMMODE_64BIT:
8693 IEM_MC_BEGIN(0, 1);
8694 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8695 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
8696 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, GCPtrEffSrc);
8697 IEM_MC_ADVANCE_RIP();
8698 IEM_MC_END();
8699 return VINF_SUCCESS;
8700 }
8701 AssertFailedReturn(VERR_INTERNAL_ERROR_5);
8702}
8703
8704
/**
 * Opcode 0x8e - MOV Sw,Ev.
 *
 * Loads a segment register from a general register or a 16-bit memory word.
 * Raises \#UD for CS as destination or an invalid segment-register index;
 * REX.R is ignored.  The actual load (descriptor checks etc.) is done by
 * the iemCImpl_load_SReg C implementation.
 */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC("mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if ( iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t, u16Value, 1);
        IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t, u16Value, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
8758
8759
8760/** Opcode 0x8f /0. */
8761FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
8762{
8763 /* This bugger is rather annoying as it requires rSP to be updated before
8764 doing the effective address calculations. Will eventually require a
8765 split between the R/M+SIB decoding and the effective address
8766 calculation - which is something that is required for any attempt at
8767 reusing this code for a recompiler. It may also be good to have if we
8768 need to delay #UD exception caused by invalid lock prefixes.
8769
8770 For now, we'll do a mostly safe interpreter-only implementation here. */
8771 /** @todo What's the deal with the 'reg' field and pop Ev? Ignorning it for
8772 * now until tests show it's checked.. */
8773 IEMOP_MNEMONIC("pop Ev");
8774 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
8775
8776 /* Register access is relatively easy and can share code. */
8777 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8778 return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
8779
8780 /*
8781 * Memory target.
8782 *
8783 * Intel says that RSP is incremented before it's used in any effective
8784 * address calcuations. This means some serious extra annoyance here since
8785 * we decode and calculate the effective address in one step and like to
8786 * delay committing registers till everything is done.
8787 *
8788 * So, we'll decode and calculate the effective address twice. This will
8789 * require some recoding if turned into a recompiler.
8790 */
8791 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */
8792
8793#ifndef TST_IEM_CHECK_MC
8794 /* Calc effective address with modified ESP. */
8795 uint8_t const offOpcodeSaved = pIemCpu->offOpcode;
8796 RTGCPTR GCPtrEff;
8797 VBOXSTRICTRC rcStrict;
8798 rcStrict = iemOpHlpCalcRmEffAddr(pIemCpu, bRm, &GCPtrEff);
8799 if (rcStrict != VINF_SUCCESS)
8800 return rcStrict;
8801 pIemCpu->offOpcode = offOpcodeSaved;
8802
8803 PCPUMCTX pCtx = pIemCpu->CTX_SUFF(pCtx);
8804 uint64_t const RspSaved = pCtx->rsp;
8805 switch (pIemCpu->enmEffOpSize)
8806 {
8807 case IEMMODE_16BIT: iemRegAddToRsp(pCtx, 2); break;
8808 case IEMMODE_32BIT: iemRegAddToRsp(pCtx, 4); break;
8809 case IEMMODE_64BIT: iemRegAddToRsp(pCtx, 8); break;
8810 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8811 }
8812 rcStrict = iemOpHlpCalcRmEffAddr(pIemCpu, bRm, &GCPtrEff);
8813 Assert(rcStrict == VINF_SUCCESS);
8814 pCtx->rsp = RspSaved;
8815
8816 /* Perform the operation - this should be CImpl. */
8817 RTUINT64U TmpRsp;
8818 TmpRsp.u = pCtx->rsp;
8819 switch (pIemCpu->enmEffOpSize)
8820 {
8821 case IEMMODE_16BIT:
8822 {
8823 uint16_t u16Value;
8824 rcStrict = iemMemStackPopU16Ex(pIemCpu, &u16Value, &TmpRsp);
8825 if (rcStrict == VINF_SUCCESS)
8826 rcStrict = iemMemStoreDataU16(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u16Value);
8827 break;
8828 }
8829
8830 case IEMMODE_32BIT:
8831 {
8832 uint32_t u32Value;
8833 rcStrict = iemMemStackPopU32Ex(pIemCpu, &u32Value, &TmpRsp);
8834 if (rcStrict == VINF_SUCCESS)
8835 rcStrict = iemMemStoreDataU32(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u32Value);
8836 break;
8837 }
8838
8839 case IEMMODE_64BIT:
8840 {
8841 uint64_t u64Value;
8842 rcStrict = iemMemStackPopU64Ex(pIemCpu, &u64Value, &TmpRsp);
8843 if (rcStrict == VINF_SUCCESS)
8844 rcStrict = iemMemStoreDataU16(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u64Value);
8845 break;
8846 }
8847
8848 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8849 }
8850 if (rcStrict == VINF_SUCCESS)
8851 {
8852 pCtx->rsp = TmpRsp.u;
8853 iemRegUpdateRip(pIemCpu);
8854 }
8855 return rcStrict;
8856
8857#else
8858 return VERR_IEM_IPE_2;
8859#endif
8860}
8861
8862
/**
 * Opcode 0x8f - Group 1A.
 *
 * Only /0 (pop Ev) is defined in this group; any other reg-field value
 * raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp1A)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only pop Ev in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
}
8871
8872
8873/**
8874 * Common 'xchg reg,rAX' helper.
8875 */
8876FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
8877{
8878 IEMOP_HLP_NO_LOCK_PREFIX();
8879
8880 iReg |= pIemCpu->uRexB;
8881 switch (pIemCpu->enmEffOpSize)
8882 {
8883 case IEMMODE_16BIT:
8884 IEM_MC_BEGIN(0, 2);
8885 IEM_MC_LOCAL(uint16_t, u16Tmp1);
8886 IEM_MC_LOCAL(uint16_t, u16Tmp2);
8887 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
8888 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
8889 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
8890 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
8891 IEM_MC_ADVANCE_RIP();
8892 IEM_MC_END();
8893 return VINF_SUCCESS;
8894
8895 case IEMMODE_32BIT:
8896 IEM_MC_BEGIN(0, 2);
8897 IEM_MC_LOCAL(uint32_t, u32Tmp1);
8898 IEM_MC_LOCAL(uint32_t, u32Tmp2);
8899 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
8900 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
8901 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
8902 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
8903 IEM_MC_ADVANCE_RIP();
8904 IEM_MC_END();
8905 return VINF_SUCCESS;
8906
8907 case IEMMODE_64BIT:
8908 IEM_MC_BEGIN(0, 2);
8909 IEM_MC_LOCAL(uint64_t, u64Tmp1);
8910 IEM_MC_LOCAL(uint64_t, u64Tmp2);
8911 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
8912 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
8913 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
8914 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
8915 IEM_MC_ADVANCE_RIP();
8916 IEM_MC_END();
8917 return VINF_SUCCESS;
8918
8919 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8920 }
8921}
8922
8923
8924/** Opcode 0x90. */
8925FNIEMOP_DEF(iemOp_nop)
8926{
8927 /* R8/R8D and RAX/EAX can be exchanged. */
8928 if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_B)
8929 {
8930 IEMOP_MNEMONIC("xchg r8,rAX");
8931 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
8932 }
8933
8934 if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
8935 IEMOP_MNEMONIC("pause");
8936 else
8937 IEMOP_MNEMONIC("nop");
8938 IEM_MC_BEGIN(0, 0);
8939 IEM_MC_ADVANCE_RIP();
8940 IEM_MC_END();
8941 return VINF_SUCCESS;
8942}
8943
8944
/** Opcode 0x91 - XCHG rCX,rAX; defers to the common xchg-with-rAX helper. */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC("xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
8951
8952
/** Opcode 0x92 - XCHG rDX,rAX; defers to the common xchg-with-rAX helper. */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC("xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
8959
8960
/** Opcode 0x93 - XCHG rBX,rAX; defers to the common xchg-with-rAX helper. */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC("xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
8967
8968
8969/** Opcode 0x94. */
8970FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
8971{
8972 IEMOP_MNEMONIC("xchg rSX,rAX");
8973 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
8974}
8975
8976
/** Opcode 0x95 - XCHG rBP,rAX; defers to the common xchg-with-rAX helper. */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC("xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
8983
8984
/** Opcode 0x96 - XCHG rSI,rAX; defers to the common xchg-with-rAX helper. */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC("xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
8991
8992
/** Opcode 0x97 - XCHG rDI,rAX; defers to the common xchg-with-rAX helper. */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC("xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
8999
9000
/**
 * Opcode 0x98 - CBW / CWDE / CDQE.
 *
 * Sign-extends the lower half of rAX into the full operand-sized register:
 * AL->AX (cbw), AX->EAX (cwde), EAX->RAX (cdqe).  Implemented by testing the
 * top bit of the source half and OR-ing/AND-ing the upper half accordingly.
 */
FNIEMOP_DEF(iemOp_cbw)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC("cbw");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC("cwde");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC("cdqe");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
9046
9047
/**
 * Opcode 0x99 - CWD / CDQ / CQO.
 *
 * Sign-extends rAX into rDX: the sign bit of the operand-sized rAX selects
 * whether rDX is filled with all-ones or zero.
 */
FNIEMOP_DEF(iemOp_cwd)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC("cwd");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC("cdq");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC("cqo");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
9093
9094
/**
 * Opcode 0x9a - CALL Ap (far call with immediate seg:off pointer).
 *
 * Invalid in 64-bit mode.  Decodes the 16/32-bit offset (operand-size
 * dependent) followed by the 16-bit selector and defers to iemCImpl_callf.
 */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC("call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pIemCpu->enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pIemCpu->enmEffOpSize);
}
9111
9112
/**
 * Opcode 0x9b - WAIT (aka FWAIT).
 *
 * Checks for device-not-available and pending FPU exceptions, then does
 * nothing else but advance RIP.
 */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC("wait");
    IEMOP_HLP_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9126
9127
/**
 * Opcode 0x9c - PUSHF/PUSHFD/PUSHFQ.
 *
 * Defaults to 64-bit operand size in long mode; defers to iemCImpl_pushf.
 */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pIemCpu->enmEffOpSize);
}
9135
9136
/**
 * Opcode 0x9d - POPF/POPFD/POPFQ.
 *
 * Defaults to 64-bit operand size in long mode; defers to iemCImpl_popf.
 */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pIemCpu->enmEffOpSize);
}
9144
9145
/**
 * Opcode 0x9e - SAHF.
 *
 * Stores AH into the low byte of EFLAGS (SF, ZF, AF, PF, CF; bit 1 forced
 * to 1).  In 64-bit mode it is only valid when the AMD CPUID LAHF/SAHF
 * feature bit is present, otherwise \#UD.
 */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC("sahf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        && !IEM_IS_AMD_CPUID_FEATURE_PRESENT_ECX(X86_CPUID_EXT_FEATURE_ECX_LAHF_SAHF))
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/); /* AH is encoded as SP with no REX */
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00)); /* keep the upper 24 bits of EFLAGS */
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);           /* bit 1 is always set */
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9168
9169
/**
 * Opcode 0x9f - LAHF.
 *
 * Loads the low byte of EFLAGS into AH.  In 64-bit mode it is only valid
 * when the AMD CPUID LAHF/SAHF feature bit is present, otherwise \#UD.
 */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC("lahf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        && !IEM_IS_AMD_CPUID_FEATURE_PRESENT_ECX(X86_CPUID_EXT_FEATURE_ECX_LAHF_SAHF))
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags); /* AH is encoded as SP with no REX */
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9186
9187
/**
 * Macro used by iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend off lock
 * prefixes.  Will return on failures.
 *
 * The immediate offset width follows the effective address mode (16/32/64
 * bits) and is always zero-extended into the 64-bit capable variable.
 *
 * @param a_GCPtrMemOff The variable to store the offset in.
 */
#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
    do \
    { \
        switch (pIemCpu->enmEffAddrMode) \
        { \
            case IEMMODE_16BIT: \
                IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_32BIT: \
                IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_64BIT: \
                IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
                break; \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
        IEMOP_HLP_NO_LOCK_PREFIX(); \
    } while (0)
9212
9213/** Opcode 0xa0. */
9214FNIEMOP_DEF(iemOp_mov_Al_Ob)
9215{
9216 /*
9217 * Get the offset and fend of lock prefixes.
9218 */
9219 RTGCPTR GCPtrMemOff;
9220 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
9221
9222 /*
9223 * Fetch AL.
9224 */
9225 IEM_MC_BEGIN(0,1);
9226 IEM_MC_LOCAL(uint8_t, u8Tmp);
9227 IEM_MC_FETCH_MEM_U8(u8Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
9228 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
9229 IEM_MC_ADVANCE_RIP();
9230 IEM_MC_END();
9231 return VINF_SUCCESS;
9232}
9233
9234
/** Opcode 0xa1. (mov rAX,Ov)
 *
 * Loads AX/EAX/RAX (per effective operand size) from the word/dword/qword
 * at the moffs address in the effective segment.
 */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset and fend off lock prefixes.
     */
    IEMOP_MNEMONIC("mov rAX,Ov");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch rAX.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
9280
9281
9282/** Opcode 0xa2. */
9283FNIEMOP_DEF(iemOp_mov_Ob_AL)
9284{
9285 /*
9286 * Get the offset and fend of lock prefixes.
9287 */
9288 RTGCPTR GCPtrMemOff;
9289 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
9290
9291 /*
9292 * Store AL.
9293 */
9294 IEM_MC_BEGIN(0,1);
9295 IEM_MC_LOCAL(uint8_t, u8Tmp);
9296 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
9297 IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrMemOff, u8Tmp);
9298 IEM_MC_ADVANCE_RIP();
9299 IEM_MC_END();
9300 return VINF_SUCCESS;
9301}
9302
9303
9304/** Opcode 0xa3. */
9305FNIEMOP_DEF(iemOp_mov_Ov_rAX)
9306{
9307 /*
9308 * Get the offset and fend of lock prefixes.
9309 */
9310 RTGCPTR GCPtrMemOff;
9311 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
9312
9313 /*
9314 * Store rAX.
9315 */
9316 switch (pIemCpu->enmEffOpSize)
9317 {
9318 case IEMMODE_16BIT:
9319 IEM_MC_BEGIN(0,1);
9320 IEM_MC_LOCAL(uint16_t, u16Tmp);
9321 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
9322 IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrMemOff, u16Tmp);
9323 IEM_MC_ADVANCE_RIP();
9324 IEM_MC_END();
9325 return VINF_SUCCESS;
9326
9327 case IEMMODE_32BIT:
9328 IEM_MC_BEGIN(0,1);
9329 IEM_MC_LOCAL(uint32_t, u32Tmp);
9330 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
9331 IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrMemOff, u32Tmp);
9332 IEM_MC_ADVANCE_RIP();
9333 IEM_MC_END();
9334 return VINF_SUCCESS;
9335
9336 case IEMMODE_64BIT:
9337 IEM_MC_BEGIN(0,1);
9338 IEM_MC_LOCAL(uint64_t, u64Tmp);
9339 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
9340 IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrMemOff, u64Tmp);
9341 IEM_MC_ADVANCE_RIP();
9342 IEM_MC_END();
9343 return VINF_SUCCESS;
9344
9345 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9346 }
9347}
9348
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Emits the non-repeating MOVS body: loads ValBits bits from
 * [effSeg:SI/ESI/RSI], stores them to [ES:DI/EDI/RDI], then steps both index
 * registers up or down by the element size depending on EFLAGS.DF.
 *
 * @param ValBits  Element width in bits (8/16/32/64).
 * @param AddrBits Effective address width in bits (16/32/64).
 */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
9367
/** Opcode 0xa4. (movsb)
 *
 * Byte string move.  REP/REPNE prefixed forms are deferred to the C
 * implementation; the single-shot form uses IEM_MOVS_CASE.
 */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep movsb Xb,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("movsb Xb,Yb");

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
9401
9402
/** Opcode 0xa5. (movsw/movsd/movsq)
 *
 * Word/dword/qword string move.  REP/REPNE prefixed forms are deferred to
 * the C implementation selected by operand and address size; the single-shot
 * forms use IEM_MOVS_CASE.
 */
FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep movs Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* NOTE(review): no 'break' here; harmless since every inner case returns, but inconsistent with the 16-bit case above. */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    /* 16-bit addressing cannot be encoded together with 64-bit operand size. */
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("movs Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with movsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_4); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
9485
9486#undef IEM_MOVS_CASE
9487
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Emits the non-repeating CMPS body: loads ValBits bits from
 * [effSeg:SI/ESI/RSI] and [ES:DI/EDI/RDI], compares them via
 * iemAImpl_cmp_uNN (which updates EFLAGS), then steps both index registers
 * up or down by the element size depending on EFLAGS.DF.
 *
 * @param ValBits  Element width in bits (8/16/32/64).
 * @param AddrBits Effective address width in bits (16/32/64).
 */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 3); \
        IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
        IEM_MC_ARG(uint##ValBits##_t,   uValue2,  1); \
        IEM_MC_ARG(uint32_t *,          pEFlags,  2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
        IEM_MC_REF_LOCAL(puValue1, uValue1); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \
9514
9515/** Opcode 0xa6. */
9516FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
9517{
9518 IEMOP_HLP_NO_LOCK_PREFIX();
9519
9520 /*
9521 * Use the C implementation if a repeat prefix is encountered.
9522 */
9523 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
9524 {
9525 IEMOP_MNEMONIC("repe cmps Xb,Yb");
9526 switch (pIemCpu->enmEffAddrMode)
9527 {
9528 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pIemCpu->iEffSeg);
9529 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pIemCpu->iEffSeg);
9530 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pIemCpu->iEffSeg);
9531 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9532 }
9533 }
9534 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
9535 {
9536 IEMOP_MNEMONIC("repe cmps Xb,Yb");
9537 switch (pIemCpu->enmEffAddrMode)
9538 {
9539 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pIemCpu->iEffSeg);
9540 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pIemCpu->iEffSeg);
9541 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pIemCpu->iEffSeg);
9542 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9543 }
9544 }
9545 IEMOP_MNEMONIC("cmps Xb,Yb");
9546
9547 /*
9548 * Sharing case implementation with cmps[wdq] below.
9549 */
9550 switch (pIemCpu->enmEffAddrMode)
9551 {
9552 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
9553 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
9554 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
9555 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9556 }
9557 return VINF_SUCCESS;
9558
9559}
9560
9561
/** Opcode 0xa7. (cmpsw/cmpsd/cmpsq)
 *
 * Word/dword/qword string compare.  REPE/REPNE prefixed forms are deferred
 * to the C implementation selected by operand and address size; the
 * single-shot forms use IEM_CMPS_CASE.
 */
FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe cmps Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* NOTE(review): no 'break' here; harmless since every inner case returns. */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    /* 16-bit addressing cannot be encoded together with 64-bit operand size. */
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne cmps Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* NOTE(review): no 'break' here; harmless since every inner case returns. */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    /* 16-bit addressing cannot be encoded together with 64-bit operand size. */
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    IEMOP_MNEMONIC("cmps Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with cmpsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_4); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}
9680
9681#undef IEM_CMPS_CASE
9682
/** Opcode 0xa8. (test AL,Ib)
 *
 * Delegates to the common AL,Ib binary-operator helper with the TEST
 * implementation table.  AF is architecturally undefined after TEST.
 */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC("test al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
}
9690
9691
/** Opcode 0xa9. (test rAX,Iz)
 *
 * Delegates to the common rAX,Iz binary-operator helper with the TEST
 * implementation table.  AF is architecturally undefined after TEST.
 */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC("test rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
}
9699
9700
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Emits the non-repeating STOS body: stores the low ValBits bits of rAX to
 * [ES:DI/EDI/RDI], then steps DI up or down by the element size depending on
 * EFLAGS.DF.
 *
 * @param ValBits  Element width in bits (8/16/32/64).
 * @param AddrBits Effective address width in bits (16/32/64).
 */
#define IEM_STOS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr,  X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \
9716
/** Opcode 0xaa. (stosb)
 *
 * Byte string store.  REP/REPNE prefixed forms are deferred to the C
 * implementation; the single-shot form uses IEM_STOS_CASE.
 */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep stos Yb,al");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("stos Yb,al");

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
9750
9751
/** Opcode 0xab. (stosw/stosd/stosq)
 *
 * Word/dword/qword string store.  REP/REPNE prefixed forms are deferred to
 * the C implementation selected by operand and address size; the single-shot
 * forms use IEM_STOS_CASE.
 */
FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep stos Yv,rAX");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* NOTE(review): no 'break' here; harmless since every inner case returns. */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    /* 16-bit addressing cannot be encoded together with 64-bit operand size. */
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("stos Yv,rAX");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with stosb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_4); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
9834
9835#undef IEM_STOS_CASE
9836
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Emits the non-repeating LODS body: loads ValBits bits from
 * [effSeg:SI/ESI/RSI] into the low part of rAX, then steps SI up or down by
 * the element size depending on EFLAGS.DF.
 *
 * @param ValBits  Element width in bits (8/16/32/64).
 * @param AddrBits Effective address width in bits (16/32/64).
 */
#define IEM_LODS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
9852
/** Opcode 0xac. (lodsb)
 *
 * Byte string load.  REP/REPNE prefixed forms are deferred to the C
 * implementation; the single-shot form uses IEM_LODS_CASE.
 */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep lodsb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("lodsb al,Xb");

    /*
     * Sharing case implementation with lods[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
9886
9887
/** Opcode 0xad. (lodsw/lodsd/lodsq)
 *
 * Word/dword/qword string load.  REP/REPNE prefixed forms are deferred to
 * the C implementation selected by operand and address size; the single-shot
 * forms use IEM_LODS_CASE.
 */
FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep lods rAX,Xv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* NOTE(review): no 'break' here; harmless since every inner case returns. */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    /* 16-bit addressing cannot be encoded together with 64-bit operand size. */
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("lods rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with lodsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_4); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
9970
9971#undef IEM_LODS_CASE
9972
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Emits the non-repeating SCAS body: compares rAX against ValBits bits read
 * from [ES:DI/EDI/RDI] via iemAImpl_cmp_uNN (updating EFLAGS), then steps DI
 * up or down by the element size depending on EFLAGS.DF.
 *
 * @param ValBits  Element width in bits (8/16/32/64).
 * @param AddrBits Effective address width in bits (16/32/64).
 */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 2); \
        IEM_MC_ARG(uint##ValBits##_t *, puRax,   0); \
        IEM_MC_ARG(uint##ValBits##_t,   uValue,  1); \
        IEM_MC_ARG(uint32_t *,          pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
        IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
9994
/** Opcode 0xae. (scasb)
 *
 * Byte string scan.  REPE/REPNE prefixed forms are deferred to the C
 * implementation; the single-shot form uses IEM_SCAS_CASE.
 */
FNIEMOP_DEF(iemOp_scasb_AL_Xb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe scasb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne scasb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("scasb al,Xb");

    /*
     * Sharing case implementation with scas[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
10039
10040
/** Opcode 0xaf. (scasw/scasd/scasq)
 *
 * Word/dword/qword string scan.  REPE/REPNE prefixed forms are deferred to
 * the C implementation selected by operand and address size; the single-shot
 * forms use IEM_SCAS_CASE.
 */
FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe scas rAX,Xv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* NOTE(review): no 'break' here; harmless since every inner case returns. */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne scas rAX,Xv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* NOTE(review): no 'break' here; harmless since every inner case returns. */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    /* 16-bit addressing cannot be encoded together with 64-bit operand size. */
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_3);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("scas rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with scasb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_INTERNAL_ERROR_4); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
10156
10157#undef IEM_SCAS_CASE
10158
10159/**
10160 * Common 'mov r8, imm8' helper.
10161 */
10162FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
10163{
10164 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10165 IEMOP_HLP_NO_LOCK_PREFIX();
10166
10167 IEM_MC_BEGIN(0, 1);
10168 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
10169 IEM_MC_STORE_GREG_U8(iReg, u8Value);
10170 IEM_MC_ADVANCE_RIP();
10171 IEM_MC_END();
10172
10173 return VINF_SUCCESS;
10174}
10175
10176
10177/** Opcode 0xb0. */
10178FNIEMOP_DEF(iemOp_mov_AL_Ib)
10179{
10180 IEMOP_MNEMONIC("mov AL,Ib");
10181 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX);
10182}
10183
10184
10185/** Opcode 0xb1. */
10186FNIEMOP_DEF(iemOp_CL_Ib)
10187{
10188 IEMOP_MNEMONIC("mov CL,Ib");
10189 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX);
10190}
10191
10192
10193/** Opcode 0xb2. */
10194FNIEMOP_DEF(iemOp_DL_Ib)
10195{
10196 IEMOP_MNEMONIC("mov DL,Ib");
10197 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX);
10198}
10199
10200
10201/** Opcode 0xb3. */
10202FNIEMOP_DEF(iemOp_BL_Ib)
10203{
10204 IEMOP_MNEMONIC("mov BL,Ib");
10205 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX);
10206}
10207
10208
10209/** Opcode 0xb4. */
10210FNIEMOP_DEF(iemOp_mov_AH_Ib)
10211{
10212 IEMOP_MNEMONIC("mov AH,Ib");
10213 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP);
10214}
10215
10216
10217/** Opcode 0xb5. */
10218FNIEMOP_DEF(iemOp_CH_Ib)
10219{
10220 IEMOP_MNEMONIC("mov CH,Ib");
10221 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP);
10222}
10223
10224
10225/** Opcode 0xb6. */
10226FNIEMOP_DEF(iemOp_DH_Ib)
10227{
10228 IEMOP_MNEMONIC("mov DH,Ib");
10229 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI);
10230}
10231
10232
10233/** Opcode 0xb7. */
10234FNIEMOP_DEF(iemOp_BH_Ib)
10235{
10236 IEMOP_MNEMONIC("mov BH,Ib");
10237 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI);
10238}
10239
10240
10241/**
10242 * Common 'mov regX,immX' helper.
10243 */
10244FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
10245{
10246 switch (pIemCpu->enmEffOpSize)
10247 {
10248 case IEMMODE_16BIT:
10249 {
10250 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
10251 IEMOP_HLP_NO_LOCK_PREFIX();
10252
10253 IEM_MC_BEGIN(0, 1);
10254 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
10255 IEM_MC_STORE_GREG_U16(iReg, u16Value);
10256 IEM_MC_ADVANCE_RIP();
10257 IEM_MC_END();
10258 break;
10259 }
10260
10261 case IEMMODE_32BIT:
10262 {
10263 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
10264 IEMOP_HLP_NO_LOCK_PREFIX();
10265
10266 IEM_MC_BEGIN(0, 1);
10267 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
10268 IEM_MC_STORE_GREG_U32(iReg, u32Value);
10269 IEM_MC_ADVANCE_RIP();
10270 IEM_MC_END();
10271 break;
10272 }
10273 case IEMMODE_64BIT:
10274 {
10275 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm);
10276 IEMOP_HLP_NO_LOCK_PREFIX();
10277
10278 IEM_MC_BEGIN(0, 1);
10279 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
10280 IEM_MC_STORE_GREG_U64(iReg, u64Value);
10281 IEM_MC_ADVANCE_RIP();
10282 IEM_MC_END();
10283 break;
10284 }
10285 }
10286
10287 return VINF_SUCCESS;
10288}
10289
10290
10291/** Opcode 0xb8. */
10292FNIEMOP_DEF(iemOp_eAX_Iv)
10293{
10294 IEMOP_MNEMONIC("mov rAX,IV");
10295 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX);
10296}
10297
10298
10299/** Opcode 0xb9. */
10300FNIEMOP_DEF(iemOp_eCX_Iv)
10301{
10302 IEMOP_MNEMONIC("mov rCX,IV");
10303 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX);
10304}
10305
10306
10307/** Opcode 0xba. */
10308FNIEMOP_DEF(iemOp_eDX_Iv)
10309{
10310 IEMOP_MNEMONIC("mov rDX,IV");
10311 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX);
10312}
10313
10314
10315/** Opcode 0xbb. */
10316FNIEMOP_DEF(iemOp_eBX_Iv)
10317{
10318 IEMOP_MNEMONIC("mov rBX,IV");
10319 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX);
10320}
10321
10322
10323/** Opcode 0xbc. */
10324FNIEMOP_DEF(iemOp_eSP_Iv)
10325{
10326 IEMOP_MNEMONIC("mov rSP,IV");
10327 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP);
10328}
10329
10330
10331/** Opcode 0xbd. */
10332FNIEMOP_DEF(iemOp_eBP_Iv)
10333{
10334 IEMOP_MNEMONIC("mov rBP,IV");
10335 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP);
10336}
10337
10338
10339/** Opcode 0xbe. */
10340FNIEMOP_DEF(iemOp_eSI_Iv)
10341{
10342 IEMOP_MNEMONIC("mov rSI,IV");
10343 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI);
10344}
10345
10346
10347/** Opcode 0xbf. */
10348FNIEMOP_DEF(iemOp_eDI_Iv)
10349{
10350 IEMOP_MNEMONIC("mov rDI,IV");
10351 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI);
10352}
10353
10354
/**
 * Opcode 0xc0 - Group 2: rol/ror/rcl/rcr/shl/shr/sar Eb,Ib.
 *
 * The ModRM reg field selects the operation; the shift count is an imm8
 * that follows the ModRM/displacement bytes.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,Ib"); break;
        /* /6 is an undefined encoding.  NOTE(review): the 0xd2/0xd3 handlers
           raise invalid-opcode for /6; raising invalid-lock-prefix here looks
           inconsistent — confirm intended behavior. */
        case 6: return IEMOP_RAISE_INVALID_LOCK_PREFIX();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are left undefined by shifts/rotates per the SDM; tell the
       verification mode not to compare them. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory: the effective address must be decoded before the imm8. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_ASSIGN(cShiftArg, cShiftArg);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10413
10414
/**
 * Opcode 0xc1 - Group 2: rol/ror/rcl/rcr/shl/shr/sar Ev,Ib.
 *
 * Word/dword/qword variant of 0xc0; the ModRM reg field selects the
 * operation and the shift count is an imm8 following ModRM/displacement.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,Ib"); break;
        /* /6 is an undefined encoding; see the review note in iemOp_Grp2_Eb_Ib
           regarding lock-prefix vs. invalid-opcode. */
        case 6: return IEMOP_RAISE_INVALID_LOCK_PREFIX();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are undefined after shifts/rotates per the SDM. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory: effective address is decoded before the imm8 shift count. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10550
10551
/** Opcode 0xc2 - near return, popping Iw bytes of arguments.
 *  Near ret defaults to 64-bit operand size in long mode (stack op). */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC("retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pIemCpu->enmEffOpSize, u16Imm);
}
10561
10562
/** Opcode 0xc3 - near return.
 *  Shares iemCImpl_retn with 0xc2, passing a zero pop count. */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC("retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pIemCpu->enmEffOpSize, 0);
}
10571
10572
/** Opcode 0xc4 - les Gv,Mp.
 *  NOTE(review): 0xc4 doubles as the VEX3 prefix in 64-bit mode; presumably
 *  the invalid-in-64-bit handling happens in iemOpCommonLoadSRegAndGreg —
 *  confirm. */
FNIEMOP_DEF(iemOp_les_Gv_Mp)
{
    IEMOP_MNEMONIC("les Gv,Mp");
    return FNIEMOP_CALL_1(iemOpCommonLoadSRegAndGreg, X86_SREG_ES);
}
10579
10580
/** Opcode 0xc5 - lds Gv,Mp.
 *  NOTE(review): 0xc5 doubles as the VEX2 prefix in 64-bit mode; presumably
 *  handled by iemOpCommonLoadSRegAndGreg — confirm. */
FNIEMOP_DEF(iemOp_lds_Gv_Mp)
{
    IEMOP_MNEMONIC("lds Gv,Mp");
    return FNIEMOP_CALL_1(iemOpCommonLoadSRegAndGreg, X86_SREG_DS);
}
10587
10588
/**
 * Opcode 0xc6 - Group 11: mov Eb,Ib (the only defined /r encoding, /0).
 */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC("mov Eb,Ib");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_BEGIN(0, 0);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access - the effective address precedes the immediate byte. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10620
10621
10622/** Opcode 0xc7. */
10623FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
10624{
10625 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10626 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
10627 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
10628 return IEMOP_RAISE_INVALID_OPCODE();
10629 IEMOP_MNEMONIC("mov Ev,Iz");
10630
10631 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10632 {
10633 /* register access */
10634 switch (pIemCpu->enmEffOpSize)
10635 {
10636 case IEMMODE_16BIT:
10637 IEM_MC_BEGIN(0, 0);
10638 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
10639 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Imm);
10640 IEM_MC_ADVANCE_RIP();
10641 IEM_MC_END();
10642 return VINF_SUCCESS;
10643
10644 case IEMMODE_32BIT:
10645 IEM_MC_BEGIN(0, 0);
10646 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
10647 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Imm);
10648 IEM_MC_ADVANCE_RIP();
10649 IEM_MC_END();
10650 return VINF_SUCCESS;
10651
10652 case IEMMODE_64BIT:
10653 IEM_MC_BEGIN(0, 0);
10654 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm);
10655 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Imm);
10656 IEM_MC_ADVANCE_RIP();
10657 IEM_MC_END();
10658 return VINF_SUCCESS;
10659
10660 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10661 }
10662 }
10663 else
10664 {
10665 /* memory access. */
10666 switch (pIemCpu->enmEffOpSize)
10667 {
10668 case IEMMODE_16BIT:
10669 IEM_MC_BEGIN(0, 1);
10670 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10671 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
10672 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
10673 IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Imm);
10674 IEM_MC_ADVANCE_RIP();
10675 IEM_MC_END();
10676 return VINF_SUCCESS;
10677
10678 case IEMMODE_32BIT:
10679 IEM_MC_BEGIN(0, 1);
10680 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10681 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
10682 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
10683 IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Imm);
10684 IEM_MC_ADVANCE_RIP();
10685 IEM_MC_END();
10686 return VINF_SUCCESS;
10687
10688 case IEMMODE_64BIT:
10689 IEM_MC_BEGIN(0, 1);
10690 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10691 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
10692 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm);
10693 IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Imm);
10694 IEM_MC_ADVANCE_RIP();
10695 IEM_MC_END();
10696 return VINF_SUCCESS;
10697
10698 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10699 }
10700 }
10701}
10702
10703
10704
10705
/** Opcode 0xc8 - enter Iw,Ib: allocate a stack frame of Iw bytes with an
 *  Ib-deep display of nested frame pointers.  Defaults to 64-bit operand
 *  size in long mode (stack op). */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC("enter Iw,Ib");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_NO_LOCK_PREFIX();
    uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pIemCpu->enmEffOpSize, cbFrame, u8NestingLevel);
}
10716
10717
10718/** Opcode 0xc9. */
10719FNIEMOP_DEF(iemOp_leave)
10720{
10721 IEMOP_MNEMONIC("retn");
10722 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10723 IEMOP_HLP_NO_LOCK_PREFIX();
10724 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pIemCpu->enmEffOpSize);
10725}
10726
10727
/** Opcode 0xca - far return, popping Iw bytes of arguments. */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC("retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pIemCpu->enmEffOpSize, u16Imm);
}
10737
10738
/** Opcode 0xcb - far return.
 *  Shares iemCImpl_retf with 0xca, passing a zero pop count. */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC("retf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pIemCpu->enmEffOpSize, 0);
}
10747
10748
/** Opcode 0xcc - int3 breakpoint: raises #BP with the is-breakpoint flag
 *  set so the CIMPL can apply INT3-specific privilege/ICEBP rules.
 *  NOTE(review): no IEMOP_MNEMONIC here unlike the neighbouring opcodes —
 *  presumably just an omission. */
FNIEMOP_DEF(iemOp_int_3)
{
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
}
10754
10755
/** Opcode 0xcd - int Ib: software interrupt with the vector taken from the
 *  immediate byte. */
FNIEMOP_DEF(iemOp_int_Ib)
{
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
}
10762
10763
10764/** Opcode 0xce. */
10765FNIEMOP_DEF(iemOp_into)
10766{
10767 IEM_MC_BEGIN(2, 0);
10768 IEM_MC_ARG_CONST(uint8_t, u8Int, /*=*/ X86_XCPT_OF, 0);
10769 IEM_MC_ARG_CONST(bool, fIsBpInstr, /*=*/ false, 1);
10770 IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
10771 IEM_MC_END();
10772 return VINF_SUCCESS;
10773}
10774
10775
/** Opcode 0xcf - iret/iretd/iretq, selected by the effective operand size. */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC("iret");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pIemCpu->enmEffOpSize);
}
10783
10784
/**
 * Opcode 0xd0 - Group 2: rol/ror/rcl/rcr/shl/shr/sar Eb,1.
 *
 * Same as 0xc0 but with an implicit shift count of 1.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,1"); break;
        /* /6 is undefined; see review note in iemOp_Grp2_Eb_Ib about
           lock-prefix vs. invalid-opcode. */
        case 6: return IEMOP_RAISE_INVALID_LOCK_PREFIX();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF and AF are undefined after shifts/rotates per the SDM. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10840
10841
10842
/**
 * Opcode 0xd1 - Group 2: rol/ror/rcl/rcr/shl/shr/sar Ev,1.
 *
 * Same as 0xc1 but with an implicit shift count of 1.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,1"); break;
        /* /6 is undefined; see review note in iemOp_Grp2_Eb_Ib. */
        case 6: return IEMOP_RAISE_INVALID_LOCK_PREFIX();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF and AF are undefined after shifts/rotates per the SDM. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10971
10972
/**
 * Opcode 0xd2 - Group 2: rol/ror/rcl/rcr/shl/shr/sar Eb,CL.
 *
 * Same as 0xc0 but the shift count comes from CL.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is undefined. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    /* OF and AF are undefined after shifts/rotates per the SDM. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);  /* the CL count */
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);  /* the CL count */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
11030
11031
11032/** Opcode 0xd3. */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    /*
     * Group 2 (opcode 0xd3): rotate/shift Ev by the CL register.  The ModR/M
     * reg field selects the operation; /6 is an undefined encoding (#UD).
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are left in an undefined state by the shift/rotate family. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination: reference the GPR directly, shift count from CL. */
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination: map the operand read-write, shift, commit. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11166
/** Opcode 0xd4. AAM Ib: ASCII adjust AX after multiply; an immediate
 * divisor of zero raises \#DE before the C-implementation is invoked.
 * Invalid in 64-bit mode (\#UD via IEMOP_HLP_NO_64BIT). */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC("aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    if (!bImm)
        return IEMOP_RAISE_DIVIDE_ERROR();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
}
11178
11179
/** Opcode 0xd5. AAD Ib: ASCII adjust AX before division; deferred to the
 * C-implementation.  Invalid in 64-bit mode (\#UD via IEMOP_HLP_NO_64BIT). */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC("aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
}
11189
11190
/** Opcode 0xd7. XLAT: loads AL from [iEffSeg:(e/r)BX + zero-extended AL],
 * with the address computed in the current effective address size. */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC("xlat");
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pIemCpu->iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pIemCpu->iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pIemCpu->iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11237
11238
11239/**
11240 * Common worker for FPU instructions working on ST0 and STn, and storing the
11241 * result in ST0.
11242 *
11243 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11244 */
11245FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
11246{
11247 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11248
11249 IEM_MC_BEGIN(3, 1);
11250 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11251 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11252 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11253 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
11254
11255 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11256 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11257 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
11258 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
11259 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
11260 IEM_MC_ELSE()
11261 IEM_MC_FPU_STACK_UNDERFLOW(0);
11262 IEM_MC_ENDIF();
11263 IEM_MC_USED_FPU();
11264 IEM_MC_ADVANCE_RIP();
11265
11266 IEM_MC_END();
11267 return VINF_SUCCESS;
11268}
11269
11270
11271/**
11272 * Common worker for FPU instructions working on ST0 and STn, and only affecting
11273 * flags.
11274 *
11275 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11276 */
11277FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
11278{
11279 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11280
11281 IEM_MC_BEGIN(3, 1);
11282 IEM_MC_LOCAL(uint16_t, u16Fsw);
11283 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
11284 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11285 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
11286
11287 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11288 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11289 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
11290 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
11291 IEM_MC_UPDATE_FSW(u16Fsw);
11292 IEM_MC_ELSE()
11293 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
11294 IEM_MC_ENDIF();
11295 IEM_MC_USED_FPU();
11296 IEM_MC_ADVANCE_RIP();
11297
11298 IEM_MC_END();
11299 return VINF_SUCCESS;
11300}
11301
11302
11303/**
11304 * Common worker for FPU instructions working on ST0 and STn, only affecting
11305 * flags, and popping when done.
11306 *
11307 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11308 */
11309FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
11310{
11311 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11312
11313 IEM_MC_BEGIN(3, 1);
11314 IEM_MC_LOCAL(uint16_t, u16Fsw);
11315 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
11316 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11317 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
11318
11319 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11320 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11321 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
11322 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
11323 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
11324 IEM_MC_ELSE()
11325 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
11326 IEM_MC_ENDIF();
11327 IEM_MC_USED_FPU();
11328 IEM_MC_ADVANCE_RIP();
11329
11330 IEM_MC_END();
11331 return VINF_SUCCESS;
11332}
11333
11334
/** Opcode 0xd8 11/0. FADD ST(0),ST(i): result stored in ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
11341
11342
/** Opcode 0xd8 11/1. FMUL ST(0),ST(i): result stored in ST(0). */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
11349
11350
/** Opcode 0xd8 11/2. FCOM ST(0),ST(i): compare, updating only FSW. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
11357
11358
/** Opcode 0xd8 11/3. FCOMP ST(0),ST(i): compare, update FSW, then pop. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
11365
11366
/** Opcode 0xd8 11/4. FSUB ST(0),ST(i): result stored in ST(0). */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
11373
11374
/** Opcode 0xd8 11/5. FSUBR ST(0),ST(i): reversed subtract, result in ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
11381
11382
/** Opcode 0xd8 11/6. FDIV ST(0),ST(i): result stored in ST(0). */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
11389
11390
/** Opcode 0xd8 11/7. FDIVR ST(0),ST(i): reversed divide, result in ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
11397
11398
11399/**
11400 * Common worker for FPU instructions working on ST0 and an m32r, and storing
11401 * the result in ST0.
11402 *
11403 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11404 */
11405FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
11406{
11407 IEM_MC_BEGIN(3, 3);
11408 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11409 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11410 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
11411 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11412 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11413 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
11414
11415 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
11416 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11417
11418 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11419 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11420 IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);
11421
11422 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
11423 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
11424 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
11425 IEM_MC_ELSE()
11426 IEM_MC_FPU_STACK_UNDERFLOW(0);
11427 IEM_MC_ENDIF();
11428 IEM_MC_USED_FPU();
11429 IEM_MC_ADVANCE_RIP();
11430
11431 IEM_MC_END();
11432 return VINF_SUCCESS;
11433}
11434
11435
/** Opcode 0xd8 !11/0. FADD ST(0),m32real: result stored in ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
11442
11443
/** Opcode 0xd8 !11/1. FMUL ST(0),m32real: result stored in ST(0). */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
11450
11451
/** Opcode 0xd8 !11/2. FCOM ST(0),m32real: compare against a 32-bit real from
 * memory; only FSW is updated (with the memory operand recorded for FDP). */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
11484
11485
/** Opcode 0xd8 !11/3. FCOMP ST(0),m32real: same as FCOM m32real but pops the
 * register stack afterwards (note the _THEN_POP FSW/underflow variants). */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
11518
11519
/** Opcode 0xd8 !11/4. FSUB ST(0),m32real: result stored in ST(0). */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
11526
11527
/** Opcode 0xd8 !11/5. FSUBR ST(0),m32real: reversed subtract, result in ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
11534
11535
/** Opcode 0xd8 !11/6. FDIV ST(0),m32real: result stored in ST(0). */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
11542
11543
/** Opcode 0xd8 !11/7. FDIVR ST(0),m32real: reversed divide, result in ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
11550
11551
/** Opcode 0xd8. First x87 escape byte: dispatches on the ModR/M reg field,
 * with register forms (mod==3) taking ST(i) operands and memory forms taking
 * an m32real operand.  Records the FPU opcode offset before fetching ModR/M. */
FNIEMOP_DEF(iemOp_EscF0)
{
    /* offOpcode currently points just past 0xd8; back up one for FOP. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11589
11590
/** Opcode 0xd9 /0 mem32real
 * FLD m32real: fetches a 32-bit real from memory, converts it to 80-bit and
 * pushes it onto the FPU stack.  The push requires ST(7) (the register that
 * becomes the new top) to be empty; otherwise a push-overflow is recorded.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
11623
11624
/** Opcode 0xd9 !11/2 mem32real
 * FST m32real: stores ST0 to memory as a 32-bit real.  The destination is
 * mapped before conversion; on an empty ST0 a negative QNaN is written
 * instead when the invalid-operation exception is masked (FCW.IM set). */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
11659
11660
/** Opcode 0xd9 !11/3
 * FSTP m32real: same as FST m32real but pops the register stack afterwards
 * (note the _THEN_POP FSW/underflow variants). */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
11695
11696
/** Opcode 0xd9 !11/4
 * FLDENV m14/28byte: loads the FPU environment from memory; the 14 vs 28
 * byte layout is selected by the effective operand size passed to the
 * C-implementation. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pIemCpu->iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
11712
11713
11714/** Opcode 0xd9 !11/5 */
11715FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
11716{
11717 IEMOP_MNEMONIC("fldcw m2byte");
11718 IEM_MC_BEGIN(1, 1);
11719 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11720 IEM_MC_ARG(uint16_t, u16Fsw, 0);
11721 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
11722 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11723 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11724 IEM_MC_FETCH_MEM_U16(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
11725 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
11726 IEM_MC_END();
11727 return VINF_SUCCESS;
11728}
11729
11730
/** Opcode 0xd9 !11/6
 * FNSTENV m14/m28byte: stores the FPU environment to memory (no-wait form;
 * the C-implementation is iemCImpl_fnstenv).
 * NOTE(review): the mnemonic string says "fstenv" while this is the 0xd9 /6
 * no-wait encoding FNSTENV — confirm what the verification/logging expects. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pIemCpu->iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
11746
11747
/** Opcode 0xd9 !11/7
 * FNSTCW m2byte: stores the current FPU control word to memory.  Does not
 * update FOP/FPUIP (only ADVANCE_RIP is done). */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
11764
11765
/** Opcode 0xd9 0xc9, 0xd9 0xd8-0xdf, ++?.
 * FNOP: does nothing except update the FPU opcode/IP bookkeeping and raise
 * the usual device-not-available / pending-FPU-exception faults. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC("fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     * intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
11783
11784
/** Opcode 0xd9 11/0 stN
 * FLD ST(i): pushes a copy of ST(i) onto the stack (the source is read
 * relative to the pre-push top); empty source records a push underflow. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     * indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
11810
11811
/** Opcode 0xd9 11/3 stN
 * FXCH ST(i): swaps ST0 and ST(i).  ST(i) is written into ST0 via the
 * result mechanism (with C1 set in the FSW); the old ST0 is stored directly
 * into ST(i).  The underflow case is delegated to a C-implementation. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     * indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
11840
11841
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * FSTP ST(i): copies ST0 to ST(i) and pops.  The iDstReg==0 case takes a
 * shortcut that just pops (no self-copy needed). */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
    if (!iDstReg)
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();
        IEM_MC_USED_FPU();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();
        IEM_MC_USED_FPU();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
11884
11885
11886/**
11887 * Common worker for FPU instructions working on ST0 and replaces it with the
11888 * result, i.e. unary operators.
11889 *
11890 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11891 */
11892FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
11893{
11894 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11895
11896 IEM_MC_BEGIN(2, 1);
11897 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11898 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11899 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
11900
11901 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11902 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11903 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
11904 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
11905 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
11906 IEM_MC_ELSE()
11907 IEM_MC_FPU_STACK_UNDERFLOW(0);
11908 IEM_MC_ENDIF();
11909 IEM_MC_USED_FPU();
11910 IEM_MC_ADVANCE_RIP();
11911
11912 IEM_MC_END();
11913 return VINF_SUCCESS;
11914}
11915
11916
/** Opcode 0xd9 0xe0. FCHS: changes the sign of ST0. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC("fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
11923
11924
/** Opcode 0xd9 0xe1. FABS: replaces ST0 with its absolute value. */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC("fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
11931
11932
11933/**
11934 * Common worker for FPU instructions working on ST0 and only returns FSW.
11935 *
11936 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11937 */
11938FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
11939{
11940 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11941
11942 IEM_MC_BEGIN(2, 1);
11943 IEM_MC_LOCAL(uint16_t, u16Fsw);
11944 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
11945 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
11946
11947 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11948 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11949 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
11950 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
11951 IEM_MC_UPDATE_FSW(u16Fsw);
11952 IEM_MC_ELSE()
11953 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
11954 IEM_MC_ENDIF();
11955 IEM_MC_USED_FPU();
11956 IEM_MC_ADVANCE_RIP();
11957
11958 IEM_MC_END();
11959 return VINF_SUCCESS;
11960}
11961
11962
/** Opcode 0xd9 0xe4. FTST: compares ST0 against 0.0, updating only FSW. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC("ftst st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
}
11969
11970
/** Opcode 0xd9 0xe5. FXAM: classifies ST0 into the FSW condition codes. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC("fxam st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
}
11977
11978
11979/**
11980 * Common worker for FPU instructions pushing a constant onto the FPU stack.
11981 *
11982 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11983 */
11984FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
11985{
11986 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11987
11988 IEM_MC_BEGIN(1, 1);
11989 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11990 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11991
11992 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11993 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11994 IEM_MC_IF_FPUREG_IS_EMPTY(7)
11995 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
11996 IEM_MC_PUSH_FPU_RESULT(FpuRes);
11997 IEM_MC_ELSE()
11998 IEM_MC_FPU_STACK_PUSH_OVERFLOW();
11999 IEM_MC_ENDIF();
12000 IEM_MC_USED_FPU();
12001 IEM_MC_ADVANCE_RIP();
12002
12003 IEM_MC_END();
12004 return VINF_SUCCESS;
12005}
12006
12007
/** Opcode 0xd9 0xe8. FLD1: pushes +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC("fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
12014
12015
/** Opcode 0xd9 0xe9. FLDL2T: pushes log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC("fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
12022
12023
/** Opcode 0xd9 0xea. FLDL2E: pushes log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC("fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
12030
/** Opcode 0xd9 0xeb. FLDPI: pushes pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC("fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
12037
12038
/** Opcode 0xd9 0xec. FLDLG2: pushes log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC("fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
12045
/** Opcode 0xd9 0xed. FLDLN2: pushes ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC("fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
12052
12053
/** Opcode 0xd9 0xee - FLDZ: push the constant +0.0 onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC("fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
12060
12061
/** Opcode 0xd9 0xf0 - F2XM1: replace ST0 with 2^ST0 - 1. */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC("f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
12068
12069
12070/** Opcode 0xd9 0xf1. */
12071FNIEMOP_DEF(iemOp_fylx2)
12072{
12073 IEMOP_MNEMONIC("fylx2 st0");
12074 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fyl2x_r80);
12075}
12076
12077
12078/**
12079 * Common worker for FPU instructions working on ST0 and having two outputs, one
12080 * replacing ST0 and one pushed onto the stack.
12081 *
12082 * @param pfnAImpl Pointer to the instruction implementation (assembly).
12083 */
12084FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
12085{
12086 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12087
12088 IEM_MC_BEGIN(2, 1);
12089 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
12090 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
12091 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
12092
12093 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12094 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12095 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
12096 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
12097 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
12098 IEM_MC_ELSE()
12099 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
12100 IEM_MC_ENDIF();
12101 IEM_MC_USED_FPU();
12102 IEM_MC_ADVANCE_RIP();
12103
12104 IEM_MC_END();
12105 return VINF_SUCCESS;
12106}
12107
12108
/** Opcode 0xd9 0xf2 - FPTAN: partial tangent of ST0, then push (two outputs). */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC("fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
12115
12116
12117/**
12118 * Common worker for FPU instructions working on STn and ST0, storing the result
12119 * in STn, and popping the stack unless IE, DE or ZE was raised.
12120 *
12121 * @param pfnAImpl Pointer to the instruction implementation (assembly).
12122 */
12123FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
12124{
12125 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12126
12127 IEM_MC_BEGIN(3, 1);
12128 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
12129 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
12130 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
12131 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
12132
12133 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12134 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12135
12136 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
12137 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
12138 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
12139 IEM_MC_ELSE()
12140 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
12141 IEM_MC_ENDIF();
12142 IEM_MC_USED_FPU();
12143 IEM_MC_ADVANCE_RIP();
12144
12145 IEM_MC_END();
12146 return VINF_SUCCESS;
12147}
12148
12149
/** Opcode 0xd9 0xf3 - FPATAN: ST1 = arctan(ST1/ST0), then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC("fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
12156
12157
/** Opcode 0xd9 0xf4 - FXTRACT: split ST0 into exponent and significand (two outputs, one pushed). */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC("fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
12164
12165
/** Opcode 0xd9 0xf5 - FPREM1: IEEE partial remainder of ST0 / ST1, stored in ST0. */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC("fprem1 st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
12172
12173
/** Opcode 0xd9 0xf6 - FDECSTP: decrement the FPU stack top pointer (TOP). */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC("fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0); /* clears C0/C2/C3, see note above */

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
12196
12197
/** Opcode 0xd9 0xf7 - FINCSTP: increment the FPU stack top pointer (TOP). */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC("fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0); /* clears C0/C2/C3, see note above */

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
12220
12221
/** Opcode 0xd9 0xf8 - FPREM: partial remainder (truncating) of ST0 / ST1, stored in ST0. */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC("fprem st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
12228
12229
/** Opcode 0xd9 0xf9 - FYL2XP1: ST1 = ST1 * log2(ST0 + 1), then pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC("fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
12236
12237
/** Opcode 0xd9 0xfa - FSQRT: replace ST0 with its square root. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC("fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
12244
12245
/** Opcode 0xd9 0xfb - FSINCOS: ST0 = sin(ST0), push cos (two outputs). */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC("fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
12252
12253
/** Opcode 0xd9 0xfc - FRNDINT: round ST0 to an integer per the FPU rounding mode. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC("frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
12260
12261
/** Opcode 0xd9 0xfd - FSCALE: ST0 = ST0 * 2^trunc(ST1). */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC("fscale st0, st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
12268
12269
/** Opcode 0xd9 0xfe - FSIN: replace ST0 with its sine. */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC("fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
12276
12277
/** Opcode 0xd9 0xff - FCOS: replace ST0 with its cosine. */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC("fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
12284
12285
/** Register-form dispatch table for the 0xd9 escape, ModRM bytes 0xe0 thru
 *  0xff (reg fields 4..7).  Indexed by (ModRM - 0xe0); used by iemOp_EscF1. */
static const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */ iemOp_fchs,
    /* 0xe1 */ iemOp_fabs,
    /* 0xe2 */ iemOp_Invalid,
    /* 0xe3 */ iemOp_Invalid,
    /* 0xe4 */ iemOp_ftst,
    /* 0xe5 */ iemOp_fxam,
    /* 0xe6 */ iemOp_Invalid,
    /* 0xe7 */ iemOp_Invalid,
    /* 0xe8 */ iemOp_fld1,
    /* 0xe9 */ iemOp_fldl2t,
    /* 0xea */ iemOp_fldl2e,
    /* 0xeb */ iemOp_fldpi,
    /* 0xec */ iemOp_fldlg2,
    /* 0xed */ iemOp_fldln2,
    /* 0xee */ iemOp_fldz,
    /* 0xef */ iemOp_Invalid,
    /* 0xf0 */ iemOp_f2xm1,
    /* 0xf1 */ iemOp_fylx2,
    /* 0xf2 */ iemOp_fptan,
    /* 0xf3 */ iemOp_fpatan,
    /* 0xf4 */ iemOp_fxtract,
    /* 0xf5 */ iemOp_fprem1,
    /* 0xf6 */ iemOp_fdecstp,
    /* 0xf7 */ iemOp_fincstp,
    /* 0xf8 */ iemOp_fprem,
    /* 0xf9 */ iemOp_fyl2xp1,
    /* 0xfa */ iemOp_fsqrt,
    /* 0xfb */ iemOp_fsincos,
    /* 0xfc */ iemOp_frndint,
    /* 0xfd */ iemOp_fscale,
    /* 0xfe */ iemOp_fsin,
    /* 0xff */ iemOp_fcos
};
12322
12323
/** Opcode 0xd9 - escape to x87; decodes the ModRM byte and dispatches to the
 *  register-form (mod=3) or memory-form handlers. */
FNIEMOP_DEF(iemOp_EscF1)
{
    /* Remember the offset of the FPU opcode byte (already consumed). */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms, dispatched on the reg field. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                if (bRm == 0xc9)
                    return FNIEMOP_CALL(iemOp_fnop);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                /* reg >= 4 implies bRm in [0xe0, 0xff]; table-dispatch. */
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms, dispatched on the reg field. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
            case 1: return IEMOP_RAISE_INVALID_OPCODE();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12365
12366
/** Opcode 0xda 11/0 - FCMOVB: copy STn to ST0 if CF is set (below). */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both STn (source) and ST0 (destination) must be occupied. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12393
12394
/** Opcode 0xda 11/1 - FCMOVE: copy STn to ST0 if ZF is set (equal). */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both STn (source) and ST0 (destination) must be occupied. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12421
12422
/** Opcode 0xda 11/2 - FCMOVBE: copy STn to ST0 if CF or ZF is set (below or equal). */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both STn (source) and ST0 (destination) must be occupied. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12449
12450
/** Opcode 0xda 11/3 - FCMOVU: copy STn to ST0 if PF is set (unordered). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both STn (source) and ST0 (destination) must be occupied. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12477
12478
12479/**
12480 * Common worker for FPU instructions working on ST0 and STn, only affecting
12481 * flags, and popping twice when done.
12482 *
12483 * @param pfnAImpl Pointer to the instruction implementation (assembly).
12484 */
12485FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
12486{
12487 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12488
12489 IEM_MC_BEGIN(3, 1);
12490 IEM_MC_LOCAL(uint16_t, u16Fsw);
12491 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
12492 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
12493 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
12494
12495 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12496 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12497 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
12498 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
12499 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
12500 IEM_MC_ELSE()
12501 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
12502 IEM_MC_ENDIF();
12503 IEM_MC_USED_FPU();
12504 IEM_MC_ADVANCE_RIP();
12505
12506 IEM_MC_END();
12507 return VINF_SUCCESS;
12508}
12509
12510
/** Opcode 0xda 0xe9 - FUCOMPP: unordered compare ST0 with ST1, pop twice. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC("fucompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}
12517
12518
12519/**
12520 * Common worker for FPU instructions working on ST0 and an m32i, and storing
12521 * the result in ST0.
12522 *
12523 * @param pfnAImpl Pointer to the instruction implementation (assembly).
12524 */
12525FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
12526{
12527 IEM_MC_BEGIN(3, 3);
12528 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12529 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
12530 IEM_MC_LOCAL(int32_t, i32Val2);
12531 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
12532 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
12533 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
12534
12535 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
12536 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12537
12538 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12539 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12540 IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);
12541
12542 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
12543 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
12544 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
12545 IEM_MC_ELSE()
12546 IEM_MC_FPU_STACK_UNDERFLOW(0);
12547 IEM_MC_ENDIF();
12548 IEM_MC_USED_FPU();
12549 IEM_MC_ADVANCE_RIP();
12550
12551 IEM_MC_END();
12552 return VINF_SUCCESS;
12553}
12554
12555
/** Opcode 0xda !11/0 - FIADD: ST0 += m32int. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
12562
12563
/** Opcode 0xda !11/1 - FIMUL: ST0 *= m32int. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
12570
12571
/** Opcode 0xda !11/2 - FICOM: compare ST0 with m32int (flags only, no pop). */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12604
12605
/** Opcode 0xda !11/3 - FICOMP: compare ST0 with m32int, then pop. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Same worker as FICOM; only the pop-after-update differs. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12638
12639
/** Opcode 0xda !11/4 - FISUB: ST0 -= m32int. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
12646
12647
/** Opcode 0xda !11/5 - FISUBR: ST0 = m32int - ST0 (reversed subtract). */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
12654
12655
/** Opcode 0xda !11/6 - FIDIV: ST0 /= m32int. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
12662
12663
/** Opcode 0xda !11/7 - FIDIVR: ST0 = m32int / ST0 (reversed divide). */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
12670
12671
/** Opcode 0xda - escape to x87; FCMOVcc / FUCOMPP (register forms) and
 *  m32int arithmetic (memory forms). */
FNIEMOP_DEF(iemOp_EscF2)
{
    /* Remember the offset of the FPU opcode byte (already consumed). */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms, dispatched on the reg field. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN,  bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5:
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms: integer (m32i) arithmetic. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12711
12712
/** Opcode 0xdb !11/0 - FILD: load m32int, convert to r80, push onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Relative register 7 is the slot the push will occupy. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12744
12745
/** Opcode 0xdb !11/1 - FISTTP: store ST0 to m32int with truncation, then pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching the FPU stack. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: when IM is masked, write the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12780
12781
/** Opcode 0xdb !11/2 - FIST: store ST0 to m32int (rounded per FCW), no pop. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching the FPU stack. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: when IM is masked, write the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12816
12817
12818/** Opcode 0xdb !11/3. */
12819FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
12820{
12821 IEMOP_MNEMONIC("fisttp m32i");
12822 IEM_MC_BEGIN(3, 2);
12823 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12824 IEM_MC_LOCAL(uint16_t, u16Fsw);
12825 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
12826 IEM_MC_ARG(int32_t *, pi32Dst, 1);
12827 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
12828
12829 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
12830 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12831 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12832 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12833
12834 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
12835 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
12836 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
12837 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
12838 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
12839 IEM_MC_ELSE()
12840 IEM_MC_IF_FCW_IM()
12841 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
12842 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
12843 IEM_MC_ENDIF();
12844 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
12845 IEM_MC_ENDIF();
12846 IEM_MC_USED_FPU();
12847 IEM_MC_ADVANCE_RIP();
12848
12849 IEM_MC_END();
12850 return VINF_SUCCESS;
12851}
12852
12853
/** Opcode 0xdb !11/5 - FLD: load m80real and push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Relative register 7 is the slot the push will occupy. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12885
12886
/** Opcode 0xdb !11/7 - FSTP: store ST0 to m80real, then pop. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching the FPU stack. */
    IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: when IM is masked, write a negative QNaN (real indefinite). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12921
12922
/** Opcode 0xdb 11/0 - FCMOVNB: copy STn to ST0 if CF is clear (not below). */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both STn (source) and ST0 (destination) must be occupied. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12949
12950
/** Opcode 0xdb 11/1 - FCMOVNE: copy STn to ST0 if ZF is clear (not equal). */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both STn (source) and ST0 (destination) must be occupied. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12977
12978
/** Opcode 0xdb 11/2 - FCMOVNBE: copy STn to ST0 if both CF and ZF are clear (not below or equal). */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both STn (source) and ST0 (destination) must be occupied. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13005
13006
13007/** Opcode 0xdb 11/3. */
13008FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
13009{
13010 IEMOP_MNEMONIC("fcmovnnu st0,stN");
13011 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13012
13013 IEM_MC_BEGIN(0, 1);
13014 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
13015
13016 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13017 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13018
13019 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
13020 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
13021 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
13022 IEM_MC_ENDIF();
13023 IEM_MC_UPDATE_FPU_OPCODE_IP();
13024 IEM_MC_ELSE()
13025 IEM_MC_FPU_STACK_UNDERFLOW(0);
13026 IEM_MC_ENDIF();
13027 IEM_MC_USED_FPU();
13028 IEM_MC_ADVANCE_RIP();
13029
13030 IEM_MC_END();
13031 return VINF_SUCCESS;
13032}
13033
13034
/** Opcode 0xdb 0xe0 - FNENI: 8087-only interrupt enable; no-op here (ignored). */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC("fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
13046
13047
/** Opcode 0xdb 0xe1 - FNDISI: 8087-only interrupt disable; no-op here (ignored). */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC("fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
13059
13060
/** Opcode 0xdb 0xe2.
 * FNCLEX - clear the FPU exception flags in FSW without checking for
 * pending exceptions first (the "no-wait" form). */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC("fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
13074
13075
/** Opcode 0xdb 0xe3.
 * FNINIT - reinitialize the FPU; deferred to the C implementation with
 * exception checking disabled (no-wait form). */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC("fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
13083
13084
/** Opcode 0xdb 0xe4.
 * FNSETPM - 80287 "set protected mode" instruction; ignored (no-op) on
 * later FPUs. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC("fnsetpm (80287/ign)");   /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
13096
13097
/** Opcode 0xdb 0xe5.
 * FRSTPM - 80287XL "return to real mode" instruction.  Newer CPUs raise
 * \#UD for this encoding, which is the behavior implemented here; the
 * no-op variant is kept under #if 0 for reference. */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC("frstpm (80287XL/ign)");   /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
13113
13114
/** Opcode 0xdb 11/5.
 * FUCOMI - unordered compare ST(0) with ST(i), setting EFLAGS; no pop. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}
13121
13122
/** Opcode 0xdb 11/6.
 * FCOMI - ordered compare ST(0) with ST(i), setting EFLAGS; no pop. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
13129
13130
13131/** Opcode 0xdb. */
13132FNIEMOP_DEF(iemOp_EscF3)
13133{
13134 pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
13135 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13136 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
13137 {
13138 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13139 {
13140 case 0: FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
13141 case 1: FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
13142 case 2: FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
13143 case 3: FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
13144 case 4:
13145 switch (bRm)
13146 {
13147 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
13148 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
13149 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
13150 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
13151 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
13152 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
13153 case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
13154 case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
13155 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13156 }
13157 break;
13158 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
13159 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
13160 case 7: return IEMOP_RAISE_INVALID_OPCODE();
13161 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13162 }
13163 }
13164 else
13165 {
13166 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
13167 {
13168 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
13169 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
13170 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
13171 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
13172 case 4: return IEMOP_RAISE_INVALID_OPCODE();
13173 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
13174 case 6: return IEMOP_RAISE_INVALID_OPCODE();
13175 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
13176 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13177 }
13178 }
13179}
13180
13181
/**
 * Common worker for FPU instructions working on STn and ST0, and storing the
 * result in STn unless IE, DE or ZE was raised.
 *
 * @param   bRm         The ModR/M byte; the low three bits select ST(i).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,     FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,          1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,          2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Operate only when both ST(i) and ST(0) are valid; underflow otherwise. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13213
13214
/** Opcode 0xdc 11/0.
 * FADD ST(i),ST(0) - delegates to the common STn/ST0 worker with the
 * assembly add implementation. */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
13221
13222
/** Opcode 0xdc 11/1.
 * FMUL ST(i),ST(0) - delegates to the common STn/ST0 worker. */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
13229
13230
/** Opcode 0xdc 11/4.
 * FSUBR ST(i),ST(0) - delegates to the common STn/ST0 worker. */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
13237
13238
/** Opcode 0xdc 11/5.
 * FSUB ST(i),ST(0) - delegates to the common STn/ST0 worker. */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
13245
13246
/** Opcode 0xdc 11/6.
 * FDIVR ST(i),ST(0) - delegates to the common STn/ST0 worker. */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
13253
13254
/** Opcode 0xdc 11/7.
 * FDIV ST(i),ST(0) - delegates to the common STn/ST0 worker. */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
13261
13262
/**
 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
 * memory operand, and storing the result in ST0.
 *
 * @param   bRm         The ModR/M byte; used to compute the effective address.
 * @param   pfnImpl     Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Factor2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,     FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Factor1,             1);
    IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U,   pr64Factor2, r64Factor2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Fetch the memory operand first, then operate if ST(0) is valid. */
    IEM_MC_FETCH_MEM_R64(r64Factor2, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
        IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13297
13298
/** Opcode 0xdc !11/0.
 * FADD ST(0),m64real - delegates to the common ST0/m64r worker. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
13305
13306
/** Opcode 0xdc !11/1.
 * FMUL ST(0),m64real - delegates to the common ST0/m64r worker. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
13313
13314
/** Opcode 0xdc !11/2.
 * FCOM ST(0),m64real - compare ST(0) against a 64-bit real memory
 * operand, updating only FSW (no stack store, no pop). */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,  0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,          1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val2,   r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13347
13348
/** Opcode 0xdc !11/3.
 * FCOMP ST(0),m64real - same as FCOM m64r but pops the register stack
 * after updating FSW (note the *_THEN_POP macro variants). */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,  0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,          1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val2,   r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13381
13382
/** Opcode 0xdc !11/4.
 * FSUB ST(0),m64real - delegates to the common ST0/m64r worker. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
13389
13390
/** Opcode 0xdc !11/5.
 * FSUBR ST(0),m64real - delegates to the common ST0/m64r worker. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
13397
13398
/** Opcode 0xdc !11/6.
 * FDIV ST(0),m64real - delegates to the common ST0/m64r worker. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
13405
13406
/** Opcode 0xdc !11/7.
 * FDIVR ST(0),m64real - delegates to the common ST0/m64r worker. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
13413
13414
/** Opcode 0xdc.
 * Escape group F4 dispatcher: register forms operate STn <- STn op ST0;
 * memory forms operate ST0 <- ST0 op m64real. */
FNIEMOP_DEF(iemOp_EscF4)
{
    /* Remember the FPU opcode offset before fetching the ModR/M byte. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,      bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN,     bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13451
13452
/** Opcode 0xdd !11/0.
 * FLD m64real - convert a 64-bit real from memory to 80-bit and push it
 * onto the FPU stack.
 * @sa  iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pIemCpu->iEffSeg, GCPtrEffSrc);
    /* A push requires the register that will become the new top,
       ST(7) relative to the current top, to be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13484
13485
/** Opcode 0xdd !11/1.  (Comment previously said !11/0; the EscF5 dispatch
 * below routes /1 here.)
 * FISTTP m64int - store ST(0) to memory as a 64-bit integer using
 * truncation, then pop.  If ST(0) is empty and FCW.IM is set, the
 * "integer indefinite" value (INT64_MIN) is stored instead. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int64_t *,               pi64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before deciding which value to store. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13520
13521
/** Opcode 0xdd !11/2.  (Comment previously said !11/0; the EscF5 dispatch
 * below routes /2 here.)
 * FST m64real - store ST(0) to memory as a 64-bit real, no pop.  An empty
 * ST(0) with FCW.IM set stores negative QNaN instead. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U,             pr64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13556
13557
13558
13559
/** Opcode 0xdd !11/3.  (Comment previously said !11/0; the EscF5 dispatch
 * below routes /3 here.)
 * FSTP m64real - identical to FST m64r except the stack is popped
 * afterwards (*_THEN_POP macro variants). */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U,             pr64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13594
13595
13596/** Opcode 0xdd !11/0. */
13597FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
13598{
13599 IEMOP_MNEMONIC("fxrstor m94/108byte");
13600 IEM_MC_BEGIN(3, 0);
13601 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
13602 IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pIemCpu->iEffSeg, 1);
13603 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
13604 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
13605 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13606 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13607 IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
13608 IEM_MC_END();
13609 return VINF_SUCCESS;
13610}
13611
13612
/** Opcode 0xdd !11/6.  (Comment previously said !11/0; the EscF5 dispatch
 * below routes /6 here.)
 * FNSAVE m94/108byte - save the FPU state to memory without exception
 * checking; deferred to the C implementation. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize,  0);
    IEM_MC_ARG_CONST(uint8_t,           iEffSeg,      /*=*/ pIemCpu->iEffSeg,       1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffDst,                                2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;

}
13629
/** Opcode 0xdd !11/7.  (Comment previously said !11/0; the EscF5 dispatch
 * below routes /7 here.)
 * FNSTSW m16 - store the FPU status word to a 16-bit memory operand
 * without checking for pending exceptions. */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

    /* NOTE(review): unlike the other memory-form handlers here, the DNA
       check precedes DONE_DECODING and the effective-address calc comes
       after - presumably intentional for the no-wait form; confirm. */
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

/** @todo Debug / drop a hint to the verifier that things may differ
 *        from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 *        NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}
13653
13654
/** Opcode 0xdd 11/0.
 * FFREE ST(i) - mark the register as empty without changing the stack top. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave the
       unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
13676
13677
/** Opcode 0xdd 11/2.  (Comment previously said 11/1; the EscF5 dispatch
 * below routes /2 here.)
 * FST ST(i) - copy ST(0) into ST(i) without popping. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        /* Wrap the ST(0) value in a result with a zero FSW delta. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
13700
13701
13702/** Opcode 0xdd 11/3. */
13703FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
13704{
13705 IEMOP_MNEMONIC("fcom st0,stN");
13706 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
13707}
13708
13709
13710/** Opcode 0xdd 11/4. */
13711FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
13712{
13713 IEMOP_MNEMONIC("fcomp st0,stN");
13714 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
13715}
13716
13717
/** Opcode 0xdd.
 * Escape group F5 dispatcher: register forms cover FFREE/FST/FSTP/FUCOM(P),
 * memory forms cover m64real load/store, FISTTP, FRSTOR, FNSAVE, FNSTSW. */
FNIEMOP_DEF(iemOp_EscF5)
{
    /* Remember the FPU opcode offset before fetching the ModR/M byte. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN,    bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN,     bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN,    bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN,  bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r,    bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r,    bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r,   bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor,      bRm);
            case 5: return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave,      bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw,      bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13754
13755
/** Opcode 0xde 11/0.
 * FADDP ST(i),ST(0) - add then pop; delegates to the popping STn/ST0 worker. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
13762
13763
/** Opcode 0xde 11/1.  (Comment previously said 11/0; the EscF6 dispatch
 * below routes /1 here.)
 * FMULP ST(i),ST(0) - multiply then pop. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
13770
13771
13772/** Opcode 0xde 0xd9. */
13773FNIEMOP_DEF(iemOp_fcompp)
13774{
13775 IEMOP_MNEMONIC("fucompp st0,stN");
13776 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
13777}
13778
13779
/** Opcode 0xde 11/4.
 * FSUBRP ST(i),ST(0) - reverse subtract then pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
13786
13787
/** Opcode 0xde 11/5.
 * FSUBP ST(i),ST(0) - subtract then pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
13794
13795
/** Opcode 0xde 11/6.
 * FDIVRP ST(i),ST(0) - reverse divide then pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
13802
13803
/** Opcode 0xde 11/7.
 * FDIVP ST(i),ST(0) - divide then pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
13810
13811
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * @param   bRm         The ModR/M byte; used to compute the effective address.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,              FpuRes);
    IEM_MC_LOCAL(int16_t,                   i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT,     pFpuRes,    FpuRes,  0);
    IEM_MC_ARG(PCRTFLOAT80U,                pr80Value1,          1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *,   pi16Val2,   i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Operate only when ST(0) is valid; underflow otherwise. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13847
13848
/** Opcode 0xde !11/0.
 * FIADD m16int - delegates to the common ST0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
13855
13856
/** Opcode 0xde !11/1.
 * FIMUL m16int - delegates to the common ST0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
13863
13864
/** Opcode 0xde !11/2.
 * FICOM ST(0),m16int - compare ST(0) against a 16-bit integer memory
 * operand, updating only FSW (no pop). */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,                  u16Fsw);
    IEM_MC_LOCAL(int16_t,                   i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,        pu16Fsw,    u16Fsw,  0);
    IEM_MC_ARG(PCRTFLOAT80U,                pr80Value1,          1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *,   pi16Val2,   i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13897
13898
/** Opcode 0xde !11/3.
 * FICOMP ST(0),m16int - same as FICOM m16i but pops the register stack
 * afterwards (*_THEN_POP macro variants). */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,                  u16Fsw);
    IEM_MC_LOCAL(int16_t,                   i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,        pu16Fsw,    u16Fsw,  0);
    IEM_MC_ARG(PCRTFLOAT80U,                pr80Value1,          1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *,   pi16Val2,   i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13931
13932
/** Opcode 0xde !11/4.
 * FISUB m16int - delegates to the common ST0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
13939
13940
/** Opcode 0xde !11/5.
 * FISUBR m16int - delegates to the common ST0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
13947
13948
13949/** Opcode 0xde !11/6. */
13950FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
13951{
13952 IEMOP_MNEMONIC("fiadd m16i");
13953 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
13954}
13955
13956
13957/** Opcode 0xde !11/7. */
13958FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
13959{
13960 IEMOP_MNEMONIC("fiadd m16i");
13961 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
13962}
13963
13964
/** Opcode 0xde.
 * Escape group F6 dispatcher: register forms are the popping arithmetic
 * instructions (FADDP..FDIVP) plus FCOMPP at 0xd9; memory forms are the
 * m16int integer arithmetic/compare instructions. */
FNIEMOP_DEF(iemOp_EscF6)
{
    /* Remember the FPU opcode offset before fetching the ModR/M byte. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9)   /* only the 0xde 0xd9 encoding is FCOMPP */
                        return FNIEMOP_CALL(iemOp_fcompp);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14003
14004
/** Opcode 0xdf 11/0.
 * Undocument instruction, assumed to work like ffree + fincstp: marks the
 * given stack register empty and then increments the FPU stack top. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Free ST(i) (the rm field selects the register), then pop. */
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14026
14027
/** Opcode 0xdf 0xe0.
 * FNSTSW AX: copies the FPU status word into AX without checking for
 * pending FPU exceptions (no-wait form). */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC("fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14043
14044
14045/** Opcode 0xdf 11/5. */
14046FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
14047{
14048 IEMOP_MNEMONIC("fcomip st0,stN");
14049 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
14050}
14051
14052
/** Opcode 0xdf 11/6.
 * FCOMIP ST(0),ST(i): ordered compare, set EFLAGS, then pop (fPop=true). */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
14059
14060
/** Opcode 0xdf !11/0.
 * FILD m16i - not implemented yet (stub). */
FNIEMOP_STUB_1(iemOp_fild_m16i, uint8_t, bRm);
14063
14064
/** Opcode 0xdf !11/1.
 * FISTTP m16i: store ST(0) to a 16-bit integer with truncation, then pop.
 * On an empty stack register, stores the integer-indefinite value if the
 * invalid-operation exception is masked (FCW.IM), then raises stack underflow. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,                u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,      pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int16_t *,                 pi16Dst,           1);
    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value,         2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching the FPU stack. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty register: masked #IA stores the integer-indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14099
14100
14101/** Opcode 0xdf !11/2. */
14102FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
14103{
14104 IEMOP_MNEMONIC("fistp m16i");
14105 IEM_MC_BEGIN(3, 2);
14106 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14107 IEM_MC_LOCAL(uint16_t, u16Fsw);
14108 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14109 IEM_MC_ARG(int16_t *, pi16Dst, 1);
14110 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
14111
14112 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
14113 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14114 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14115 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14116
14117 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
14118 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14119 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
14120 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
14121 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
14122 IEM_MC_ELSE()
14123 IEM_MC_IF_FCW_IM()
14124 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
14125 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
14126 IEM_MC_ENDIF();
14127 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
14128 IEM_MC_ENDIF();
14129 IEM_MC_USED_FPU();
14130 IEM_MC_ADVANCE_RIP();
14131
14132 IEM_MC_END();
14133 return VINF_SUCCESS;
14134}
14135
14136
/** Opcode 0xdf !11/3.
 * FISTP m16i: store ST(0) to a 16-bit integer, then pop.  On an empty stack
 * register, stores the integer-indefinite value if #IA is masked (FCW.IM),
 * then raises stack underflow. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,                u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,      pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int16_t *,                 pi16Dst,           1);
    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value,         2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty register: masked #IA stores the integer-indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14171
14172
/** Opcode 0xdf !11/4.
 * FBLD m80bcd - not implemented yet (stub). */
FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);

/** Opcode 0xdf !11/5.
 * FILD m64i - not implemented yet (stub). */
FNIEMOP_STUB_1(iemOp_fild_m64i, uint8_t, bRm);

/** Opcode 0xdf !11/6.
 * FBSTP m80bcd - not implemented yet (stub). */
FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
14181
14182
/** Opcode 0xdf !11/7.
 * FISTP m64i: store ST(0) to a 64-bit integer, then pop.  On an empty stack
 * register, stores the integer-indefinite value if #IA is masked (FCW.IM),
 * then raises stack underflow. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,                u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,      pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int64_t *,                 pi64Dst,           1);
    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value,         2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty register: masked #IA stores the integer-indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14217
14218
14219/** Opcode 0xdf. */
14220FNIEMOP_DEF(iemOp_EscF7)
14221{
14222 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14223 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
14224 {
14225 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14226 {
14227 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
14228 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
14229 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
14230 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
14231 case 4: if (bRm == 0xe0)
14232 return FNIEMOP_CALL(iemOp_fnstsw_ax);
14233 return IEMOP_RAISE_INVALID_OPCODE();
14234 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
14235 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
14236 case 7: return IEMOP_RAISE_INVALID_OPCODE();
14237 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14238 }
14239 }
14240 else
14241 {
14242 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
14243 {
14244 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
14245 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
14246 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
14247 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
14248 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
14249 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
14250 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
14251 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
14252 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14253 }
14254 }
14255}
14256
14257
/** Opcode 0xe0.
 * LOOPNE Jb: decrement xCX (width per effective address size) and take the
 * short relative jump while the counter is non-zero and ZF is clear. */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC("loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address size prefix selects which counter register width is used. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
14304
14305
/** Opcode 0xe1.
 * LOOPE Jb: decrement xCX (width per effective address size) and take the
 * short relative jump while the counter is non-zero and ZF is set. */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC("loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address size prefix selects which counter register width is used. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
14352
14353
/** Opcode 0xe2.
 * LOOP Jb: decrement xCX (width per effective address size) and take the
 * short relative jump while the counter is non-zero; EFLAGS are ignored. */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC("loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
14403
14404
/** Opcode 0xe3.
 * JCXZ/JECXZ/JRCXZ Jb: take the short relative jump when the counter
 * register (width per effective address size) is zero; no decrement. */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC("jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* Note the inverted branch structure: fall through when non-zero. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
14448
14449
14450/** Opcode 0xe4 */
14451FNIEMOP_DEF(iemOp_in_AL_Ib)
14452{
14453 IEMOP_MNEMONIC("in eAX,Ib");
14454 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
14455 IEMOP_HLP_NO_LOCK_PREFIX();
14456 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
14457}
14458
14459
/** Opcode 0xe5.
 * IN eAX,Ib: read 2 or 4 bytes (per effective operand size) from the
 * immediate I/O port into AX/EAX. */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC("in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
14468
14469
/** Opcode 0xe6.
 * OUT Ib,AL: write AL to the immediate I/O port (cbReg=1). */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC("out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
}
14478
14479
/** Opcode 0xe7.
 * OUT Ib,eAX: write AX/EAX (2 or 4 bytes per effective operand size) to the
 * immediate I/O port. */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC("out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
14488
14489
/** Opcode 0xe8.
 * CALL Jv: near relative call.  The immediate width follows the effective
 * operand size; in 64-bit mode a 32-bit immediate is sign-extended to 64 bits. */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC("call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            /* 32-bit displacement, sign-extended to 64 bits by the fetcher. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
14518
14519
/** Opcode 0xe9.
 * JMP Jv: near relative jump.  The 32-bit immediate form is shared between
 * 32-bit and 64-bit mode (the displacement is always 32 bits in 64-bit mode). */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC("jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S16(i16Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        case IEMMODE_32BIT:
        {
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S32(i32Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
14549
14550
/** Opcode 0xea.
 * JMP Ap: direct far jump with an immediate sel:offset pointer.
 * Invalid in 64-bit mode (IEMOP_HLP_NO_64BIT). */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC("jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pIemCpu->enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pIemCpu->enmEffOpSize);
}
14567
14568
/** Opcode 0xeb.
 * JMP Jb: short relative jump with an 8-bit signed displacement. */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC("jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_REL_JMP_S8(i8Imm);
    IEM_MC_END();
    return VINF_SUCCESS;
}
14582
14583
/** Opcode 0xec.
 * IN AL,DX: read one byte from the I/O port in DX into AL (cbReg=1). */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC("in AL,DX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}
14591
14592
/** Opcode 0xed.
 * IN eAX,DX: read 2 or 4 bytes (per effective operand size) from the I/O
 * port in DX into AX/EAX.
 * NOTE(review): function name is missing the "in_" part (should be
 * iemOp_in_eAX_DX for consistency with iemOp_in_AL_DX); renaming requires
 * touching the one-byte opcode map elsewhere in the file. */
FNIEMOP_DEF(iemOp_eAX_DX)
{
    IEMOP_MNEMONIC("in eAX,DX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
14600
14601
/** Opcode 0xee.
 * OUT DX,AL: write AL to the I/O port in DX (cbReg=1). */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC("out DX,AL");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}
14609
14610
/** Opcode 0xef.
 * OUT DX,eAX: write AX/EAX (2 or 4 bytes per effective operand size) to the
 * I/O port in DX. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC("out DX,eAX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
14618
14619
/** Opcode 0xf0.
 * LOCK prefix: records the prefix and continues decoding with the next
 * opcode byte via the one-byte opcode table. */
FNIEMOP_DEF(iemOp_lock)
{
    pIemCpu->fPrefixes |= IEM_OP_PRF_LOCK;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
14628
14629
/** Opcode 0xf2.
 * REPNE/REPNZ prefix: records the prefix (clearing any prior REPE) and
 * continues decoding with the next opcode byte. */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pIemCpu->fPrefixes &= ~IEM_OP_PRF_REPZ;
    pIemCpu->fPrefixes |= IEM_OP_PRF_REPNZ;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
14640
14641
/** Opcode 0xf3.
 * REPE/REPZ prefix: records the prefix (clearing any prior REPNE) and
 * continues decoding with the next opcode byte. */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pIemCpu->fPrefixes &= ~IEM_OP_PRF_REPNZ;
    pIemCpu->fPrefixes |= IEM_OP_PRF_REPZ;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
14652
14653
14654/** Opcode 0xf4. */
14655FNIEMOP_DEF(iemOp_hlt)
14656{
14657 IEMOP_HLP_NO_LOCK_PREFIX();
14658 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
14659}
14660
14661
/** Opcode 0xf5.
 * CMC: complement the carry flag; no other flags touched here. */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC("cmc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14673
14674
/**
 * Common implementation of 'inc/dec/not/neg Eb'.
 *
 * Handles both the register form (mod == 3) and the memory form; the memory
 * form maps the operand read/write and picks the locked assembly worker when
 * a LOCK prefix is present.
 *
 * @param   bRm     The RM byte.
 * @param   pImpl   The instruction implementation.
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst, 0);
        IEM_MC_ARG(uint32_t *,  pEFlags, 1);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,          0);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* LOCK prefix selects the atomic variant of the worker. */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
14718
14719
/**
 * Common implementation of 'inc/dec/not/neg Ev'.
 *
 * The register form is forwarded to iemOpCommonUnaryGReg; the memory form is
 * expanded here for each effective operand size, picking the locked assembly
 * worker when a LOCK prefix is present.
 *
 * @param   bRm     The RM byte.
 * @param   pImpl   The instruction implementation.
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

    /* Memory we do here. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint16_t *,      pu16Dst,         0);
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
            IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint32_t *,      pu32Dst,         0);
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
            IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint64_t *,      pu64Dst,         0);
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
            IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
14798
14799
/** Opcode 0xf6 /0.
 * TEST Eb,Ib: AND flags test of a byte register/memory operand against an
 * immediate; the destination is only read (note the read-only mapping in the
 * memory path). */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC("test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/u8Imm,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,          0);
        IEM_MC_ARG(uint8_t,         u8Src,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

        /* Effective address first, then the immediate that follows it. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
14847
14848
/** Opcode 0xf7 /0.
 * TEST Ev,Iv: AND flags test of a word/dword/qword register/memory operand
 * against an immediate (sign-extended 32-bit immediate in 64-bit mode); the
 * destination is only read. */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("test Ev,Iv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/u16Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/u32Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                /* 32-bit immediate, sign-extended to 64 bits by the fetcher. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/u64Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,         0);
                IEM_MC_ARG(uint16_t,        u16Src,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                /* Effective address first, then the immediate that follows it. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,         0);
                IEM_MC_ARG(uint32_t,        u32Src,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,         0);
                IEM_MC_ARG(uint64_t,        u64Src,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14984
14985
/** Opcode 0xf6 /4, /5, /6 and /7.
 * Common worker for the byte-sized group-3 multiply/divide instructions:
 * fetches the Eb operand (register or memory) and calls the given assembly
 * worker with AX as the implicit accumulator. */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *,      pu16AX,  0);
        IEM_MC_ARG(uint8_t,         u8Value, 1);
        IEM_MC_ARG(uint32_t *,      pEFlags, 2);
        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *,      pu16AX,  0);
        IEM_MC_ARG(uint8_t,         u8Value, 1);
        IEM_MC_ARG(uint32_t *,      pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
        IEM_MC_FETCH_MEM_U8(u8Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pfnU8, pu16AX, u8Value, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
15028
15029
/**
 * Common worker for the group 3 word/dword/qword multiply and divide
 * instructions: opcode 0xf7 /4 (mul), /5 (imul), /6 (div) and /7 (idiv).
 *
 * The xAX and xDX registers are referenced read/write as the implicit
 * operands.  The assembly worker returns a status code; a non-zero status
 * raises a divide error (\#DE) instead of advancing RIP.
 *
 * @param   bRm     The ModR/M byte (already fetched by the caller).
 * @param   pImpl   Table of operand-size specific assembly workers.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
{
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm);
                IEM_MC_FETCH_MEM_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15209
/**
 * Opcode 0xf6 - group 3 with byte operands.
 *
 * The instruction is selected by the reg field of the ModR/M byte:
 * /0 test, /2 not, /3 neg, /4 mul, /5 imul, /6 div, /7 idiv.
 */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1:
            /* /1 has no assigned instruction here; raise \#UD. */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC("not Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC("neg Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC("mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
        case 5:
            IEMOP_MNEMONIC("imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
        case 6:
            IEMOP_MNEMONIC("div Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
        case 7:
            IEMOP_MNEMONIC("idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
15245
15246
/**
 * Opcode 0xf7 - group 3 with word/dword/qword operands.
 *
 * The instruction is selected by the reg field of the ModR/M byte:
 * /0 test, /2 not, /3 neg, /4 mul, /5 imul, /6 div, /7 idiv.
 */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1:
            /* /1 has no assigned instruction here; raise \#UD. */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC("not Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC("neg Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC("mul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
        case 5:
            IEMOP_MNEMONIC("imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
        case 6:
            IEMOP_MNEMONIC("div Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
        case 7:
            IEMOP_MNEMONIC("idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
15282
15283
/** Opcode 0xf8 - clc: clears the carry flag, no other state touched. */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC("clc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15295
15296
/** Opcode 0xf9 - stc: sets the carry flag, no other state touched. */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC("stc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15308
15309
/** Opcode 0xfa - cli; deferred to the C implementation (IOPL/CPL checks). */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC("cli");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
}
15317
15318
/** Opcode 0xfb - sti; deferred to the C implementation (IOPL/CPL checks). */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC("sti");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
15325
15326
/** Opcode 0xfc - cld: clears the direction flag, no other state touched. */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC("cld");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15338
15339
/** Opcode 0xfd - std: sets the direction flag, no other state touched. */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC("std");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15351
15352
15353/** Opcode 0xfe. */
15354FNIEMOP_DEF(iemOp_Grp4)
15355{
15356 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
15357 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
15358 {
15359 case 0:
15360 IEMOP_MNEMONIC("inc Ev");
15361 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
15362 case 1:
15363 IEMOP_MNEMONIC("dec Ev");
15364 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
15365 default:
15366 IEMOP_MNEMONIC("grp4-ud");
15367 return IEMOP_RAISE_INVALID_OPCODE();
15368 }
15369}
15370
15371
/**
 * Opcode 0xff /2 - near indirect call.
 *
 * The target RIP is read either from a general register or from memory,
 * depending on the mod field of the ModR/M byte; the actual call (stack
 * push and RIP update) is done by the iemCImpl_call_NN workers.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("calln Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from memory (copy-paste said "register"). */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
                IEM_MC_FETCH_MEM_U16(u16Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
                IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
                IEM_MC_FETCH_MEM_U64(u64Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15453
15454typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
15455
15456FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
15457{
15458 /* Registers? How?? */
15459 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
15460 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
15461
15462 /* Far pointer loaded from memory. */
15463 switch (pIemCpu->enmEffOpSize)
15464 {
15465 case IEMMODE_16BIT:
15466 IEM_MC_BEGIN(3, 1);
15467 IEM_MC_ARG(uint16_t, u16Sel, 0);
15468 IEM_MC_ARG(uint16_t, offSeg, 1);
15469 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
15470 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15471 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
15472 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15473 IEM_MC_FETCH_MEM_U16(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
15474 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 2);
15475 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
15476 IEM_MC_END();
15477 return VINF_SUCCESS;
15478
15479 case IEMMODE_32BIT:
15480 IEM_MC_BEGIN(3, 1);
15481 IEM_MC_ARG(uint16_t, u16Sel, 0);
15482 IEM_MC_ARG(uint32_t, offSeg, 1);
15483 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
15484 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15485 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
15486 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15487 IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
15488 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 4);
15489 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
15490 IEM_MC_END();
15491 return VINF_SUCCESS;
15492
15493 case IEMMODE_64BIT:
15494 IEM_MC_BEGIN(3, 1);
15495 IEM_MC_ARG(uint16_t, u16Sel, 0);
15496 IEM_MC_ARG(uint64_t, offSeg, 1);
15497 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
15498 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15499 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
15500 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15501 IEM_MC_FETCH_MEM_U64(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
15502 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 8);
15503 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
15504 IEM_MC_END();
15505 return VINF_SUCCESS;
15506
15507 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15508 }
15509}
15510
15511
/**
 * Opcode 0xff /3 - far indirect call through a far pointer in memory.
 *
 * Decoding is shared with jmpf via iemOpHlp_Grp5_far_Ep; the callf
 * semantics live in iemCImpl_callf.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC("callf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
}
15521
15522
15523/**
15524 * Opcode 0xff /4.
15525 * @param bRm The RM byte.
15526 */
15527FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
15528{
15529 IEMOP_MNEMONIC("jmpn Ev");
15530 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
15531 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
15532
15533 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
15534 {
15535 /* The new RIP is taken from a register. */
15536 switch (pIemCpu->enmEffOpSize)
15537 {
15538 case IEMMODE_16BIT:
15539 IEM_MC_BEGIN(0, 1);
15540 IEM_MC_LOCAL(uint16_t, u16Target);
15541 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
15542 IEM_MC_SET_RIP_U16(u16Target);
15543 IEM_MC_END()
15544 return VINF_SUCCESS;
15545
15546 case IEMMODE_32BIT:
15547 IEM_MC_BEGIN(0, 1);
15548 IEM_MC_LOCAL(uint32_t, u32Target);
15549 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
15550 IEM_MC_SET_RIP_U32(u32Target);
15551 IEM_MC_END()
15552 return VINF_SUCCESS;
15553
15554 case IEMMODE_64BIT:
15555 IEM_MC_BEGIN(0, 1);
15556 IEM_MC_LOCAL(uint64_t, u64Target);
15557 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
15558 IEM_MC_SET_RIP_U64(u64Target);
15559 IEM_MC_END()
15560 return VINF_SUCCESS;
15561
15562 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15563 }
15564 }
15565 else
15566 {
15567 /* The new RIP is taken from a register. */
15568 switch (pIemCpu->enmEffOpSize)
15569 {
15570 case IEMMODE_16BIT:
15571 IEM_MC_BEGIN(0, 2);
15572 IEM_MC_LOCAL(uint16_t, u16Target);
15573 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15574 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
15575 IEM_MC_FETCH_MEM_U16(u16Target, pIemCpu->iEffSeg, GCPtrEffSrc);
15576 IEM_MC_SET_RIP_U16(u16Target);
15577 IEM_MC_END()
15578 return VINF_SUCCESS;
15579
15580 case IEMMODE_32BIT:
15581 IEM_MC_BEGIN(0, 2);
15582 IEM_MC_LOCAL(uint32_t, u32Target);
15583 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15584 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
15585 IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
15586 IEM_MC_SET_RIP_U32(u32Target);
15587 IEM_MC_END()
15588 return VINF_SUCCESS;
15589
15590 case IEMMODE_64BIT:
15591 IEM_MC_BEGIN(0, 2);
15592 IEM_MC_LOCAL(uint32_t, u32Target);
15593 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15594 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
15595 IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
15596 IEM_MC_SET_RIP_U32(u32Target);
15597 IEM_MC_END()
15598 return VINF_SUCCESS;
15599
15600 IEM_NOT_REACHED_DEFAULT_CASE_RET();
15601 }
15602 }
15603}
15604
15605
/**
 * Opcode 0xff /5 - far indirect jump through a far pointer in memory.
 *
 * Decoding is shared with callf via iemOpHlp_Grp5_far_Ep; the jump
 * semantics live in iemCImpl_FarJmp.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC("jmp Ep");
    /* NOTE(review): indirect far JMP is architecturally valid in 64-bit mode
       (m16:64 with REX.W per the SDM), so this \#UD guard looks like a
       deliberate implementation limit - confirm before removing. */
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
}
15616
15617
15618/**
15619 * Opcode 0xff /6.
15620 * @param bRm The RM byte.
15621 */
/**
 * Opcode 0xff /6 - push Ev.
 *
 * Register operands are forwarded to the common push-register worker;
 * memory operands are fetched here and pushed by operand size.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("push Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */

    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

    /* Memory we do here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
            IEM_MC_FETCH_MEM_U16(u16Src, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
            IEM_MC_FETCH_MEM_U32(u32Src, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
            IEM_MC_FETCH_MEM_U64(u64Src, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
15671
15672
/**
 * Opcode 0xff - group 5.
 *
 * Selected by the reg field of the ModR/M byte: /0 inc, /1 dec,
 * /2 calln, /3 callf, /4 jmpn, /5 jmpf, /6 push, /7 \#UD.
 */
FNIEMOP_DEF(iemOp_Grp5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC("inc Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC("dec Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            IEMOP_MNEMONIC("grp5-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    /* All eight reg values are handled above; keep the compiler happy. */
    AssertFailedReturn(VERR_INTERNAL_ERROR_2);
}
15701
15702
15703
/**
 * The one-byte opcode dispatch table.
 *
 * Indexed directly by the opcode byte; four entries per source line, with
 * the comment giving the opcode of the line's first entry.  Declared
 * extern at the top of this file so the decoder loop can reach it.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa, iemOp_bound_Gv_Ma, iemOp_arpl_Ew_Gw,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp, iemOp_lds_Gv_Mp, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int_3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_Invalid, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_Invalid, iemOp_repne, iemOp_repe, /** @todo 0xf1 is INT1 / ICEBP. */
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
15771
15772
15773/** @} */
15774
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette