VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h@ 60309

Last change on this file since 60309 was 60188, checked in by vboxsync, 9 years ago

IEM: Fixed a couple of edge cases and broken verification mode.

  • Update enmCpuMode after loading hidden CS flags (prep for recompiling).
  • Fixed retf in 64-bit mode where we would load CS.BASE with zero when returning to 16-bit or 32-bit code.
  • Fixed ESP/SP handling for protected mode exception injection.
  • Fixed handling of lock prefixed INT xx and INT3.
  • Implemented the two string I/O notification functions that would assert in verification mode.
  • The IEMExec* methods must call iemUninitExec to undo poisoning of decoding data members as it will otherwise interfere with verification mode opcode fetching optimizations and other stuff.

The above makes the current bs3-cpu-basic-2 code work in --execute-all-in-iem mode.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 590.6 KB
Line 
1/* $Id: IEMAllInstructions.cpp.h 60188 2016-03-24 17:44:05Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2015 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24
/**
 * Common worker for instructions like ADD, AND, OR, ++ with a byte
 * memory/register as the destination (Eb,Gb form).
 *
 * Decodes the ModR/M byte; for the register form the destination general
 * register is operated on directly, for the memory form the destination is
 * mapped so EFLAGS can be committed only after the memory access succeeds.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 * @returns Strict VBox status code (VINF_SUCCESS on the non-raising paths).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_r8, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* LOCK with a register destination is invalid (#UD). */
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        /* Implementations without a locked variant (CMP, TEST) don't write the
           destination, so the mapping can be read-only. */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R; /* CMP,TEST */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,  pu8Dst,           0);
        IEM_MC_ARG(uint8_t,    u8Src,            1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* Dispatch to the locked implementation when a LOCK prefix is present. */
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
85
86
/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination (Ev,Gv form).
 *
 * Switches on the effective operand size for each of the register and memory
 * destination cases.  For the memory case the destination is mapped so EFLAGS
 * can be committed only after the memory access succeeds.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 * @returns Strict VBox status code (VINF_SUCCESS on the non-raising paths).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_rv, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* LOCK with a register destination is invalid (#UD). */
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t,   u16Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t,   u32Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* TEST doesn't write its destination, so the 64-bit high half
                   must not be zeroed for it. */
                if (pImpl != &g_iemAImpl_test)
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t,   u64Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
        /* NOTE(review): no IEM_NOT_REACHED_DEFAULT_CASE_RET() here, unlike the
           rAX_Iz worker; enmEffOpSize presumably only takes the three values. */
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        /* pfnLockedU8 is used as the "has locked variants" indicator for all
           sizes; presumably a table has either all or none of them (CMP,TEST). */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R /* CMP,TEST */;
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst,          0);
                IEM_MC_ARG(uint16_t,   u16Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst,          0);
                IEM_MC_ARG(uint32_t,   u32Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst,          0);
                IEM_MC_ARG(uint64_t,   u64Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
233
234
/**
 * Common worker for byte instructions like ADD, AND, OR, ++ with a register as
 * the destination (Gb,Eb form).
 *
 * Since the destination is always a register, a plain memory fetch suffices
 * for the memory source form; no mapping/commit dance is needed.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 * @returns Strict VBox status code (VINF_SUCCESS on the non-raising paths).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_r8_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        /* Source is r/m, destination is the reg field (opposite of the Eb,Gb worker). */
        IEM_MC_FETCH_GREG_U8(u8Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U8(u8Src, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
286
287
/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with a
 * register as the destination (Gv,Ev form).
 *
 * Since the destination is always a register, a plain memory fetch suffices
 * for the memory source form; no mapping/commit dance is needed.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 * @returns Strict VBox status code (VINF_SUCCESS on the non-raising paths).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t,   u16Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U16(u16Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t,   u32Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U32(u32Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* Unconditional here (no g_iemAImpl_test check like the Ev,Gv
                   worker) - presumably TEST never comes through this form. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t,   u64Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t,   u16Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t,   u32Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t,   u64Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Src, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
415
416
/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on AL with
 * a byte immediate (AL,Ib form).
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 * @returns Strict VBox status code (VINF_SUCCESS on the non-raising paths).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_AL_Ib, PCIEMOPBINSIZES, pImpl)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG(uint8_t *,       pu8Dst,            0);
    IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/ u8Imm, 1);
    IEM_MC_ARG(uint32_t *,      pEFlags,           2);

    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX);
    IEM_MC_REF_EFLAGS(pEFlags);
    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
441
442
/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on
 * AX/EAX/RAX with a word/dword immediate (rAX,Iz form).
 *
 * In 64-bit mode the immediate is a dword sign-extended to a qword, per the
 * x86 Iz operand convention.
 *
 * @param   pImpl       Pointer to the instruction implementation (assembly).
 * @returns Strict VBox status code (VINF_SUCCESS on the non-raising paths).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rAX_Iz, PCIEMOPBINSIZES, pImpl)
{
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint16_t *,      pu16Dst,             0);
            IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm, 1);
            IEM_MC_ARG(uint32_t *,      pEFlags,             2);

            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint32_t *,      pu32Dst,             0);
            IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm, 1);
            IEM_MC_ARG(uint32_t *,      pEFlags,             2);

            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

            /* TEST doesn't write its destination, so the 64-bit high half
               must not be zeroed for it. */
            if (pImpl != &g_iemAImpl_test)
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* Iz: dword immediate sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint64_t *,      pu64Dst,             0);
            IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm, 1);
            IEM_MC_ARG(uint32_t *,      pEFlags,             2);

            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
515
516
/** Opcodes 0xf1, 0xd6.
 *  Decoder entry for opcodes treated as invalid; always raises \#UD. */
FNIEMOP_DEF(iemOp_Invalid)
{
    IEMOP_MNEMONIC("Invalid");
    return IEMOP_RAISE_INVALID_OPCODE();
}
523
524
525
526/** @name ..... opcodes.
527 *
528 * @{
529 */
530
531/** @} */
532
533
534/** @name Two byte opcodes (first byte 0x0f).
535 *
536 * @{
537 */
538
/** Opcode 0x0f 0x00 /0.
 *  SLDT - store the LDTR selector to a register (any operand size) or to a
 *  16-bit memory word.  Not valid in real or V8086 mode. */
FNIEMOP_DEF_1(iemOp_Grp6_sldt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sldt Rv/Mw");
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Ldtr);
                IEM_MC_FETCH_LDTR_U16(u16Ldtr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Ldtr);
                IEM_MC_FETCH_LDTR_U32(u32Ldtr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Ldtr);
                IEM_MC_FETCH_LDTR_U64(u64Ldtr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Ldtr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination is always a 16-bit store regardless of operand size. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Ldtr);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_LDTR_U16(u16Ldtr);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Ldtr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
594
595
/** Opcode 0x0f 0x00 /1.
 *  STR - store the task register selector to a register (any operand size) or
 *  to a 16-bit memory word.  Not valid in real or V8086 mode. */
FNIEMOP_DEF_1(iemOp_Grp6_str, uint8_t, bRm)
{
    IEMOP_MNEMONIC("str Rv/Mw");
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tr);
                IEM_MC_FETCH_TR_U16(u16Tr);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tr);
                IEM_MC_FETCH_TR_U32(u32Tr);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tr);
                IEM_MC_FETCH_TR_U64(u64Tr);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tr);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination is always a 16-bit store regardless of operand size. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tr);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        IEM_MC_FETCH_TR_U16(u16Tr);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tr);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
651
652
/** Opcode 0x0f 0x00 /2.
 *  LLDT - load the LDTR from a 16-bit register or memory word; the heavy
 *  lifting (privilege and descriptor checks) is done by iemCImpl_lldt.
 *  Not valid in real or V8086 mode. */
FNIEMOP_DEF_1(iemOp_Grp6_lldt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("lldt Ew");
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS);
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
682
683
/** Opcode 0x0f 0x00 /3.
 *  LTR - load the task register from a 16-bit register or memory word; the
 *  heavy lifting (privilege and descriptor checks) is done by iemCImpl_ltr.
 *  Not valid in real or V8086 mode. */
FNIEMOP_DEF_1(iemOp_Grp6_ltr, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ltr Ew");
    IEMOP_HLP_NO_REAL_OR_V86_MODE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Sel, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */
        IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
        IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
713
714
715/** Opcode 0x0f 0x00 /3. */
716FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite)
717{
718 IEMOP_HLP_NO_REAL_OR_V86_MODE();
719
720 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
721 {
722 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
723 IEM_MC_BEGIN(2, 0);
724 IEM_MC_ARG(uint16_t, u16Sel, 0);
725 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
726 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
727 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
728 IEM_MC_END();
729 }
730 else
731 {
732 IEM_MC_BEGIN(2, 1);
733 IEM_MC_ARG(uint16_t, u16Sel, 0);
734 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1);
735 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
736 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
737 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
738 IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
739 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg);
740 IEM_MC_END();
741 }
742 return VINF_SUCCESS;
743}
744
745
/** Opcode 0x0f 0x00 /4.
 *  VERR - verify a segment for reading; delegates to the common VERR/VERW worker. */
FNIEMOP_DEF_1(iemOp_Grp6_verr, uint8_t, bRm)
{
    IEMOP_MNEMONIC("verr Ew");
    return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false);
}
752
753
754/** Opcode 0x0f 0x00 /5. */
755FNIEMOP_DEF_1(iemOp_Grp6_verw, uint8_t, bRm)
756{
757 IEMOP_MNEMONIC("verr Ew");
758 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true);
759}
760
761
762/** Opcode 0x0f 0x00. */
763FNIEMOP_DEF(iemOp_Grp6)
764{
765 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
766 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
767 {
768 case 0: return FNIEMOP_CALL_1(iemOp_Grp6_sldt, bRm);
769 case 1: return FNIEMOP_CALL_1(iemOp_Grp6_str, bRm);
770 case 2: return FNIEMOP_CALL_1(iemOp_Grp6_lldt, bRm);
771 case 3: return FNIEMOP_CALL_1(iemOp_Grp6_ltr, bRm);
772 case 4: return FNIEMOP_CALL_1(iemOp_Grp6_verr, bRm);
773 case 5: return FNIEMOP_CALL_1(iemOp_Grp6_verw, bRm);
774 case 6: return IEMOP_RAISE_INVALID_OPCODE();
775 case 7: return IEMOP_RAISE_INVALID_OPCODE();
776 IEM_NOT_REACHED_DEFAULT_CASE_RET();
777 }
778
779}
780
781
/** Opcode 0x0f 0x01 /0.
 *  SGDT - store the GDTR to memory; the store itself is done by iemCImpl_sgdt.
 *  NOTE(review): the effective-address local is named GCPtrEffSrc although
 *  SGDT writes to it - naming only, behavior unaffected. */
FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sgdt Ms");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                    0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEffSrc,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSizeArg,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
798
799
/** Opcode 0x0f 0x01 /0 (mod=3, VMX).
 *  VMCALL - not implemented; logs a stub complaint and raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmcall)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
806
807
/** Opcode 0x0f 0x01 /0 (mod=3, VMX).
 *  VMLAUNCH - not implemented; logs a stub complaint and raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmlaunch)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
814
815
/** Opcode 0x0f 0x01 /0 (mod=3, VMX).
 *  VMRESUME - not implemented; logs a stub complaint and raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmresume)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
822
823
/** Opcode 0x0f 0x01 /0 (mod=3, VMX).
 *  VMXOFF - not implemented; logs a stub complaint and raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_vmxoff)
{
    IEMOP_BITCH_ABOUT_STUB();
    return IEMOP_RAISE_INVALID_OPCODE();
}
830
831
/** Opcode 0x0f 0x01 /1.
 *  SIDT - store the IDTR to memory; the store itself is done by iemCImpl_sidt.
 *  NOTE(review): like sgdt, the effective-address local is named GCPtrEffSrc
 *  although it is a store destination - naming only. */
FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sidt Ms");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                    0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEffSrc,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSizeArg,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_sidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
848
849
/** Opcode 0x0f 0x01 /1 (mod=3, 0xc8).
 *  MONITOR - deferred entirely to iemCImpl_monitor; the effective segment is
 *  passed along since MONITOR takes its address from DS:rAX by default. */
FNIEMOP_DEF(iemOp_Grp7_monitor)
{
    IEMOP_MNEMONIC("monitor");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pIemCpu->iEffSeg);
}
857
858
/** Opcode 0x0f 0x01 /1 (mod=3, 0xc9).
 *  MWAIT - deferred entirely to iemCImpl_mwait. */
FNIEMOP_DEF(iemOp_Grp7_mwait)
{
    IEMOP_MNEMONIC("mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait);
}
866
867
/** Opcode 0x0f 0x01 /2.
 *  LGDT - load the GDTR from memory; validation and loading are done by
 *  iemCImpl_lgdt. */
FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm)
{
    IEMOP_MNEMONIC("lgdt");
    IEMOP_HLP_64BIT_OP_SIZE();
    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t,         iEffSeg,                                    0);
    IEM_MC_ARG(RTGCPTR,         GCPtrEffSrc,                                1);
    IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSizeArg,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
    IEM_MC_END();
    return VINF_SUCCESS;
}
884
885
/** Opcode 0x0f 0x01 0xd0.
 *  XGETBV - only decodes when the guest CPU profile reports XSAVE/XRSTOR
 *  support; otherwise raises \#UD. */
FNIEMOP_DEF(iemOp_Grp7_xgetbv)
{
    IEMOP_MNEMONIC("xgetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
897
898
/** Opcode 0x0f 0x01 0xd1.
 *  XSETBV - only decodes when the guest CPU profile reports XSAVE/XRSTOR
 *  support; otherwise raises \#UD.  Privilege checks live in iemCImpl_xsetbv. */
FNIEMOP_DEF(iemOp_Grp7_xsetbv)
{
    IEMOP_MNEMONIC("xsetbv");
    if (IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fXSaveRstor)
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv);
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
910
911
912/** Opcode 0x0f 0x01 /3. */
913FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm)
914{
915 IEMMODE enmEffOpSize = pIemCpu->enmCpuMode == IEMMODE_64BIT
916 ? IEMMODE_64BIT
917 : pIemCpu->enmEffOpSize;
918 IEM_MC_BEGIN(3, 1);
919 IEM_MC_ARG(uint8_t, iEffSeg, 0);
920 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1);
921 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2);
922 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
923 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
924 IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
925 IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg);
926 IEM_MC_END();
927 return VINF_SUCCESS;
928}
929
930
/*
 * AMD-V (SVM) instructions, opcodes 0x0f 0x01 0xd8..0xdf.
 * All of them currently decode to \#UD via FNIEMOP_UD_STUB (SVM emulation is
 * not implemented at this point).
 */

/** Opcode 0x0f 0x01 0xd8. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);

/** Opcode 0x0f 0x01 0xd9. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);

/** Opcode 0x0f 0x01 0xda. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);

/** Opcode 0x0f 0x01 0xdb. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);

/** Opcode 0x0f 0x01 0xdc. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);

/** Opcode 0x0f 0x01 0xdd. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);

/** Opcode 0x0f 0x01 0xde. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit);

/** Opcode 0x0f 0x01 0xdf. */
FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);
954
/** Opcode 0x0f 0x01 /4.
 *  SMSW - store the machine status word (low part of CR0) to a register (any
 *  operand size) or to a 16-bit memory word. */
FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC("smsw");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_CR0_U16(u16Tmp);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_CR0_U32(u32Tmp);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_CR0_U64(u64Tmp);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Ignore operand size here, memory refs are always 16-bit. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Tmp);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_CR0_U16(u16Tmp);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tmp);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
        return VINF_SUCCESS;
    }
}
1008
1009
/** Opcode 0x0f 0x01 /6. LMSW - load machine status word into CR0. */
FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm)
{
    /* The operand size is effectively ignored, all is 16-bit and only the
       lower 3-bits are used. */
    IEMOP_MNEMONIC("lmsw");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source. */
        IEM_MC_BEGIN(1, 0);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    else
    {
        /* Memory source: always a 16-bit read. */
        IEM_MC_BEGIN(1, 1);
        IEM_MC_ARG(uint16_t, u16Tmp, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1037
1038
/** Opcode 0x0f 0x01 /7 (memory form). INVLPG - invalidate TLB entry;
 *  the effective address selects the page, the memory is not accessed. */
FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm)
{
    IEMOP_MNEMONIC("invlpg");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(1, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
1051
1052
/** Opcode 0x0f 0x01 /7 (mod=3, rm=0). SWAPGS - 64-bit mode only. */
FNIEMOP_DEF(iemOp_Grp7_swapgs)
{
    IEMOP_MNEMONIC("swapgs");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_ONLY_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs);
}
1061
1062
/** Opcode 0x0f 0x01 /7 (mod=3, rm=1). RDTSCP - not implemented yet. */
FNIEMOP_DEF(iemOp_Grp7_rdtscp)
{
    NOREF(pIemCpu);
    IEMOP_BITCH_ABOUT_STUB();
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
1070
1071
/** Opcode 0x0f 0x01. Group 7 dispatcher: selects on the /reg field, and for
 *  several encodings further on mod and the rm field. */
FNIEMOP_DEF(iemOp_Grp7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            /* /0: SGDT for memory operands, VMX instructions for mod=3. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 1:
            /* /1: SIDT for memory operands, MONITOR/MWAIT for mod=3. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_sidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 2:
            /* /2: LGDT for memory operands, XGETBV/XSETBV for mod=3. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lgdt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        case 3:
            /* /3: LIDT for memory operands, AMD SVM instructions for mod=3.
               All eight rm values are covered, so no fall-through here. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_lidt, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall);
                case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload);
                case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave);
                case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi);
                case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi);
                case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit);
                case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga);
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }

        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm);

        case 5:
            return IEMOP_RAISE_INVALID_OPCODE();

        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm);

        case 7:
            /* /7: INVLPG for memory operands, SWAPGS/RDTSCP for mod=3. */
            if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
                return FNIEMOP_CALL_1(iemOp_Grp7_invlpg, bRm);
            switch (bRm & X86_MODRM_RM_MASK)
            {
                case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs);
                case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp);
            }
            return IEMOP_RAISE_INVALID_OPCODE();

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1148
/** Common worker for LAR (0x0f 0x02) and LSL (0x0f 0x03).
 *  @param fIsLar  Set for LAR, clear for LSL; forwarded as a constant
 *                 argument to the shared C implementation. */
FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar)
{
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register source: the selector comes from a general register. */
        IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);

                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                /* 32-bit and 64-bit share the 64-bit implementation. */
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);

                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory source: the selector is read as a 16-bit word. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);

                IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint16_t, u16Sel, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP);
/** @todo testcase: make sure it's a 16-bit read. */

                IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_CIMPL_4(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, pEFlags, fIsLarArg);

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1250
1251
1252
/** Opcode 0x0f 0x02. LAR - load access rights byte. */
FNIEMOP_DEF(iemOp_lar_Gv_Ew)
{
    IEMOP_MNEMONIC("lar Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true);
}
1259
1260
/** Opcode 0x0f 0x03. LSL - load segment limit. */
FNIEMOP_DEF(iemOp_lsl_Gv_Ew)
{
    IEMOP_MNEMONIC("lsl Gv,Ew");
    return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false);
}
1267
1268
/** Opcode 0x0f 0x05. SYSCALL. (The original comment said 0x04, which is
 *  an invalid opcode; SYSCALL is encoded 0x0f 0x05.) */
FNIEMOP_DEF(iemOp_syscall)
{
    IEMOP_MNEMONIC("syscall");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall);
}
1276
1277
/** Opcode 0x0f 0x06. CLTS - clear task-switched flag in CR0.
 *  (Comment corrected: CLTS is 0x0f 0x06, not 0x05.) */
FNIEMOP_DEF(iemOp_clts)
{
    IEMOP_MNEMONIC("clts");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts);
}
1285
1286
/** Opcode 0x0f 0x07. SYSRET.
 *  (Comment corrected: SYSRET is 0x0f 0x07, not 0x06.) */
FNIEMOP_DEF(iemOp_sysret)
{
    IEMOP_MNEMONIC("sysret");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret);
}
1294
1295
/** Opcode 0x0f 0x08. INVD - not implemented yet (decode stub). */
FNIEMOP_STUB(iemOp_invd);
1298
1299
/** Opcode 0x0f 0x09. WBINVD - checked for CPL 0 but otherwise treated as
 *  a NOP, since IEM has no caches to write back and invalidate. */
FNIEMOP_DEF(iemOp_wbinvd)
{
    IEMOP_MNEMONIC("wbinvd");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS; /* ignore for now */
}
1311
1312
/** Opcode 0x0f 0x0b. UD2 - not implemented yet (decode stub). */
FNIEMOP_STUB(iemOp_ud2);
1315
/** Opcode 0x0f 0x0d. Group P (AMD prefetch). */
FNIEMOP_DEF(iemOp_nop_Ev_GrpP)
{
    /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->f3DNowPrefetch)
    {
        IEMOP_MNEMONIC("GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* Register forms are invalid; only memory operands are defined. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC("GrpP");
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    IEMOP_HLP_NO_LOCK_PREFIX();
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 2: /* Aliased to /0 for the time being. */
        case 4: /* Aliased to /0 for the time being. */
        case 5: /* Aliased to /0 for the time being. */
        case 6: /* Aliased to /0 for the time being. */
        case 7: /* Aliased to /0 for the time being. */
        case 0: IEMOP_MNEMONIC("prefetch"); break;
        case 1: IEMOP_MNEMONIC("prefetchw"); break;
        case 3: IEMOP_MNEMONIC("prefetchw"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

    /* Decode the effective address but perform no memory access. */
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    /* Currently a NOP. */
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
1355
1356
/** Opcode 0x0f 0x0e. FEMMS (3DNow!) - not implemented yet (decode stub). */
FNIEMOP_STUB(iemOp_femms);
1359
1360
/* 3DNow! operations (third opcode byte of 0x0f 0x0f): all decode stubs,
   dispatched to from iemOp_3Dnow below. */

/** Opcode 0x0f 0x0f 0x0c. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x0d. */
FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1c. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq);

/** Opcode 0x0f 0x0f 0x1d. */
FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8a. */
FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x8e. */
FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq);

/** Opcode 0x0f 0x0f 0x90. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq);

/** Opcode 0x0f 0x0f 0x94. */
FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq);

/** Opcode 0x0f 0x0f 0x96. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq);

/** Opcode 0x0f 0x0f 0x97. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9a. */
FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq);

/** Opcode 0x0f 0x0f 0x9e. */
FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq);

/** Opcode 0x0f 0x0f 0xa0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xa7. */
FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq);

/** Opcode 0x0f 0x0f 0xaa. */
FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq);

/** Opcode 0x0f 0x0f 0xae. */
FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq);

/** Opcode 0x0f 0x0f 0xb0. */
FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb4. */
FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb6. */
FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq);

/** Opcode 0x0f 0x0f 0xb7. */
FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbb. */
FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq);

/** Opcode 0x0f 0x0f 0xbf. */
FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq);
1432
1433
1434/** Opcode 0x0f 0x0f. */
1435FNIEMOP_DEF(iemOp_3Dnow)
1436{
1437 if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->f3DNow)
1438 {
1439 IEMOP_MNEMONIC("3Dnow");
1440 return IEMOP_RAISE_INVALID_OPCODE();
1441 }
1442
1443 /* This is pretty sparse, use switch instead of table. */
1444 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1445 switch (b)
1446 {
1447 case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq);
1448 case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq);
1449 case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq);
1450 case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq);
1451 case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq);
1452 case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq);
1453 case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq);
1454 case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq);
1455 case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq);
1456 case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq);
1457 case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq);
1458 case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq);
1459 case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq);
1460 case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq);
1461 case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq);
1462 case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq);
1463 case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq);
1464 case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq);
1465 case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq);
1466 case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq);
1467 case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq);
1468 case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq);
1469 case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq);
1470 case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq);
1471 default:
1472 return IEMOP_RAISE_INVALID_OPCODE();
1473 }
1474}
1475
1476
/* SSE/SSE2 move instructions 0x0f 0x10..0x17 - decode stubs; the //NEXT
   markers flag candidates queued for implementation. */

/** Opcode 0x0f 0x10. */
FNIEMOP_STUB(iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd);
/** Opcode 0x0f 0x11. */
FNIEMOP_STUB(iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd);
/** Opcode 0x0f 0x12. */
FNIEMOP_STUB(iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq); //NEXT
/** Opcode 0x0f 0x13. */
FNIEMOP_STUB(iemOp_movlps_Mq_Vq__movlpd_Mq_Vq); //NEXT
/** Opcode 0x0f 0x14. */
FNIEMOP_STUB(iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq);
/** Opcode 0x0f 0x15. */
FNIEMOP_STUB(iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq);
/** Opcode 0x0f 0x16. */
FNIEMOP_STUB(iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq); //NEXT
/** Opcode 0x0f 0x17. */
FNIEMOP_STUB(iemOp_movhps_Mq_Vq__movhpd_Mq_Vq); //NEXT
1493
1494
1495/** Opcode 0x0f 0x18. */
1496FNIEMOP_DEF(iemOp_prefetch_Grp16)
1497{
1498 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1499 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1500 {
1501 IEMOP_HLP_NO_LOCK_PREFIX();
1502 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
1503 {
1504 case 4: /* Aliased to /0 for the time being according to AMD. */
1505 case 5: /* Aliased to /0 for the time being according to AMD. */
1506 case 6: /* Aliased to /0 for the time being according to AMD. */
1507 case 7: /* Aliased to /0 for the time being according to AMD. */
1508 case 0: IEMOP_MNEMONIC("prefetchNTA m8"); break;
1509 case 1: IEMOP_MNEMONIC("prefetchT0 m8"); break;
1510 case 2: IEMOP_MNEMONIC("prefetchT1 m8"); break;
1511 case 3: IEMOP_MNEMONIC("prefetchT2 m8"); break;
1512 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1513 }
1514
1515 IEM_MC_BEGIN(0, 1);
1516 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1517 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1518 /* Currently a NOP. */
1519 IEM_MC_ADVANCE_RIP();
1520 IEM_MC_END();
1521 return VINF_SUCCESS;
1522 }
1523
1524 return IEMOP_RAISE_INVALID_OPCODE();
1525}
1526
1527
1528/** Opcode 0x0f 0x19..0x1f. */
1529FNIEMOP_DEF(iemOp_nop_Ev)
1530{
1531 IEMOP_HLP_NO_LOCK_PREFIX();
1532 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1533 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1534 {
1535 IEM_MC_BEGIN(0, 0);
1536 IEM_MC_ADVANCE_RIP();
1537 IEM_MC_END();
1538 }
1539 else
1540 {
1541 IEM_MC_BEGIN(0, 1);
1542 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1543 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1544 /* Currently a NOP. */
1545 IEM_MC_ADVANCE_RIP();
1546 IEM_MC_END();
1547 }
1548 return VINF_SUCCESS;
1549}
1550
1551
/** Opcode 0x0f 0x20. MOV Rd,Cd - read a control register. */
FNIEMOP_DEF(iemOp_mov_Rd_Cd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC("mov Rd,Cd");
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_64BIT;
    else
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2-CR4 and CR8 are valid sources; everything else is #UD. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB, iCrReg);
}
1582
1583
/** Opcode 0x0f 0x21. MOV Rd,Dd - read a debug register. */
FNIEMOP_DEF(iemOp_mov_Rd_Dd)
{
    IEMOP_MNEMONIC("mov Rd,Dd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* REX.R would select DR8+, which do not exist. */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd,
                                   (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK));
}
1596
1597
/** Opcode 0x0f 0x22. MOV Cd,Rd - write a control register. */
FNIEMOP_DEF(iemOp_mov_Cd_Rd)
{
    /* mod is ignored, as is operand size overrides. */
    IEMOP_MNEMONIC("mov Cd,Rd");
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_64BIT;
    else
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_32BIT;

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;
    if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
    {
        /* The lock prefix can be used to encode CR8 accesses on some CPUs. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fMovCr8In32Bit)
            return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */
        iCrReg |= 8;
    }
    /* Only CR0, CR2-CR4 and CR8 are valid destinations; everything else is #UD. */
    switch (iCrReg)
    {
        case 0: case 2: case 3: case 4: case 8:
            break;
        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    IEMOP_HLP_DONE_DECODING();

    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB);
}
1628
1629
/** Opcode 0x0f 0x23. MOV Dd,Rd - write a debug register. */
FNIEMOP_DEF(iemOp_mov_Dd_Rd)
{
    IEMOP_MNEMONIC("mov Dd,Rd");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REX.R would select DR8+, which do not exist. */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_R)
        return IEMOP_RAISE_INVALID_OPCODE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd,
                                   ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK),
                                   (X86_MODRM_RM_MASK & bRm) | pIemCpu->uRexB);
}
1642
1643
/** Opcode 0x0f 0x24. MOV Rd,Td - test registers; always #UD on CPUs we
 *  emulate. */
FNIEMOP_DEF(iemOp_mov_Rd_Td)
{
    IEMOP_MNEMONIC("mov Rd,Td");
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1651
1652
/** Opcode 0x0f 0x26. MOV Td,Rd - test registers; always #UD on CPUs we
 *  emulate. */
FNIEMOP_DEF(iemOp_mov_Td_Rd)
{
    IEMOP_MNEMONIC("mov Td,Rd");
    /* The RM byte is not considered, see testcase. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
1660
1661
/* SSE/SSE2 move/convert/compare instructions 0x0f 0x28..0x2f - decode stubs. */

/** Opcode 0x0f 0x28. */
FNIEMOP_STUB(iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd);
/** Opcode 0x0f 0x29. */
FNIEMOP_STUB(iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd);
/** Opcode 0x0f 0x2a. */
FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey); //NEXT
/** Opcode 0x0f 0x2b. */
FNIEMOP_STUB(iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd); //NEXT:XP
/** Opcode 0x0f 0x2c. */
FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd); //NEXT
/** Opcode 0x0f 0x2d. */
FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd);
/** Opcode 0x0f 0x2e. */
FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd); //NEXT
/** Opcode 0x0f 0x2f. */
FNIEMOP_STUB(iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd);
1678
1679
/** Opcode 0x0f 0x30. WRMSR. */
FNIEMOP_DEF(iemOp_wrmsr)
{
    IEMOP_MNEMONIC("wrmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr);
}
1687
1688
/** Opcode 0x0f 0x31. RDTSC. */
FNIEMOP_DEF(iemOp_rdtsc)
{
    IEMOP_MNEMONIC("rdtsc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc);
}
1696
1697
/** Opcode 0x0f 0x32. RDMSR.
 *  (Comment corrected: RDMSR is 0x0f 0x32, not 0x33.) */
FNIEMOP_DEF(iemOp_rdmsr)
{
    IEMOP_MNEMONIC("rdmsr");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr);
}
1705
1706
/** Opcode 0x0f 0x33. RDPMC - decode stub.
 *  (Comment corrected: RDPMC is 0x0f 0x33, not 0x34.) */
FNIEMOP_STUB(iemOp_rdpmc);
/** Opcode 0x0f 0x34. */
FNIEMOP_STUB(iemOp_sysenter);
/** Opcode 0x0f 0x35. */
FNIEMOP_STUB(iemOp_sysexit);
/** Opcode 0x0f 0x37. */
FNIEMOP_STUB(iemOp_getsec);
/** Opcode 0x0f 0x38. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */
/** Opcode 0x0f 0x3a. */
FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */
/** Opcode 0x0f 0x3c (?). */
FNIEMOP_STUB(iemOp_movnti_Gv_Ev);
1721
1722/**
1723 * Implements a conditional move.
1724 *
1725 * Wish there was an obvious way to do this where we could share and reduce
1726 * code bloat.
1727 *
1728 * @param a_Cnd The conditional "microcode" operation.
1729 */
1730#define CMOV_X(a_Cnd) \
1731 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
1732 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
1733 { \
1734 switch (pIemCpu->enmEffOpSize) \
1735 { \
1736 case IEMMODE_16BIT: \
1737 IEM_MC_BEGIN(0, 1); \
1738 IEM_MC_LOCAL(uint16_t, u16Tmp); \
1739 a_Cnd { \
1740 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
1741 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp); \
1742 } IEM_MC_ENDIF(); \
1743 IEM_MC_ADVANCE_RIP(); \
1744 IEM_MC_END(); \
1745 return VINF_SUCCESS; \
1746 \
1747 case IEMMODE_32BIT: \
1748 IEM_MC_BEGIN(0, 1); \
1749 IEM_MC_LOCAL(uint32_t, u32Tmp); \
1750 a_Cnd { \
1751 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
1752 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp); \
1753 } IEM_MC_ELSE() { \
1754 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); \
1755 } IEM_MC_ENDIF(); \
1756 IEM_MC_ADVANCE_RIP(); \
1757 IEM_MC_END(); \
1758 return VINF_SUCCESS; \
1759 \
1760 case IEMMODE_64BIT: \
1761 IEM_MC_BEGIN(0, 1); \
1762 IEM_MC_LOCAL(uint64_t, u64Tmp); \
1763 a_Cnd { \
1764 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
1765 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp); \
1766 } IEM_MC_ENDIF(); \
1767 IEM_MC_ADVANCE_RIP(); \
1768 IEM_MC_END(); \
1769 return VINF_SUCCESS; \
1770 \
1771 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
1772 } \
1773 } \
1774 else \
1775 { \
1776 switch (pIemCpu->enmEffOpSize) \
1777 { \
1778 case IEMMODE_16BIT: \
1779 IEM_MC_BEGIN(0, 2); \
1780 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1781 IEM_MC_LOCAL(uint16_t, u16Tmp); \
1782 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1783 IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
1784 a_Cnd { \
1785 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp); \
1786 } IEM_MC_ENDIF(); \
1787 IEM_MC_ADVANCE_RIP(); \
1788 IEM_MC_END(); \
1789 return VINF_SUCCESS; \
1790 \
1791 case IEMMODE_32BIT: \
1792 IEM_MC_BEGIN(0, 2); \
1793 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1794 IEM_MC_LOCAL(uint32_t, u32Tmp); \
1795 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1796 IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
1797 a_Cnd { \
1798 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp); \
1799 } IEM_MC_ELSE() { \
1800 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); \
1801 } IEM_MC_ENDIF(); \
1802 IEM_MC_ADVANCE_RIP(); \
1803 IEM_MC_END(); \
1804 return VINF_SUCCESS; \
1805 \
1806 case IEMMODE_64BIT: \
1807 IEM_MC_BEGIN(0, 2); \
1808 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
1809 IEM_MC_LOCAL(uint64_t, u64Tmp); \
1810 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
1811 IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
1812 a_Cnd { \
1813 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp); \
1814 } IEM_MC_ENDIF(); \
1815 IEM_MC_ADVANCE_RIP(); \
1816 IEM_MC_END(); \
1817 return VINF_SUCCESS; \
1818 \
1819 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
1820 } \
1821 } do {} while (0)
1822
1823
1824
/** Opcode 0x0f 0x40. CMOVO - move if OF=1. */
FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovo Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x41. CMOVNO - move if OF=0. */
FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovno Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
}


/** Opcode 0x0f 0x42. CMOVC - move if CF=1. */
FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x43. CMOVNC - move if CF=0. */
FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnc Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
}


/** Opcode 0x0f 0x44. CMOVE - move if ZF=1. */
FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
{
    IEMOP_MNEMONIC("cmove Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x45. CMOVNE - move if ZF=0. */
FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovne Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
}


/** Opcode 0x0f 0x46. CMOVBE - move if CF=1 or ZF=1. */
FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x47. CMOVNBE - move if CF=0 and ZF=0. */
FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnbe Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
}


/** Opcode 0x0f 0x48. CMOVS - move if SF=1. */
FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovs Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x49. CMOVNS - move if SF=0. */
FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovns Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
}


/** Opcode 0x0f 0x4a. CMOVP - move if PF=1. */
FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4b. CMOVNP - move if PF=0. */
FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnp Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
}


/** Opcode 0x0f 0x4c. CMOVL - move if SF != OF. */
FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4d. CMOVNL - move if SF == OF. */
FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnl Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4e. CMOVLE - move if ZF=1 or SF != OF. */
FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}


/** Opcode 0x0f 0x4f. CMOVNLE - move if ZF=0 and SF == OF. */
FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
{
    IEMOP_MNEMONIC("cmovnle Gv,Ev");
    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
}

#undef CMOV_X
1953
/* SSE/SSE2 arithmetic instructions 0x0f 0x50..0x5f - decode stubs. */

/** Opcode 0x0f 0x50. */
FNIEMOP_STUB(iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd);
/** Opcode 0x0f 0x51. */
FNIEMOP_STUB(iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd);
/** Opcode 0x0f 0x52. */
FNIEMOP_STUB(iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss);
/** Opcode 0x0f 0x53. */
FNIEMOP_STUB(iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss);
/** Opcode 0x0f 0x54. */
FNIEMOP_STUB(iemOp_andps_Vps_Wps__andpd_Wpd_Vpd);
/** Opcode 0x0f 0x55. */
FNIEMOP_STUB(iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd);
/** Opcode 0x0f 0x56. */
FNIEMOP_STUB(iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd);
/** Opcode 0x0f 0x57. */
FNIEMOP_STUB(iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd);
/** Opcode 0x0f 0x58. */
FNIEMOP_STUB(iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd); //NEXT
/** Opcode 0x0f 0x59. */
FNIEMOP_STUB(iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd);//NEXT
/** Opcode 0x0f 0x5a. */
FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd);
/** Opcode 0x0f 0x5b. */
FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps);
/** Opcode 0x0f 0x5c. */
FNIEMOP_STUB(iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd);
/** Opcode 0x0f 0x5d. */
FNIEMOP_STUB(iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd);
/** Opcode 0x0f 0x5e. */
FNIEMOP_STUB(iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd);
/** Opcode 0x0f 0x5f. */
FNIEMOP_STUB(iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd);
1986
1987
1988/**
1989 * Common worker for SSE2 and MMX instructions on the forms:
1990 * pxxxx xmm1, xmm2/mem128
1991 * pxxxx mm1, mm2/mem32
1992 *
1993 * The 2nd operand is the first half of a register, which in the memory case
1994 * means a 32-bit memory access for MMX and 128-bit aligned 64-bit or 128-bit
1995 * memory accessed for MMX.
1996 *
1997 * Exceptions type 4.
1998 */
1999FNIEMOP_DEF_1(iemOpCommonMmxSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl)
2000{
2001 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2002 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2003 {
2004 case IEM_OP_PRF_SIZE_OP: /* SSE */
2005 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2006 {
2007 /*
2008 * Register, register.
2009 */
2010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2011 IEM_MC_BEGIN(2, 0);
2012 IEM_MC_ARG(uint128_t *, pDst, 0);
2013 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2014 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2015 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
2016 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
2017 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2018 IEM_MC_ADVANCE_RIP();
2019 IEM_MC_END();
2020 }
2021 else
2022 {
2023 /*
2024 * Register, memory.
2025 */
2026 IEM_MC_BEGIN(2, 2);
2027 IEM_MC_ARG(uint128_t *, pDst, 0);
2028 IEM_MC_LOCAL(uint64_t, uSrc);
2029 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2030 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2031
2032 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2033 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2034 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2035 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
2036
2037 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
2038 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2039
2040 IEM_MC_ADVANCE_RIP();
2041 IEM_MC_END();
2042 }
2043 return VINF_SUCCESS;
2044
2045 case 0: /* MMX */
2046 if (!pImpl->pfnU64)
2047 return IEMOP_RAISE_INVALID_OPCODE();
2048 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2049 {
2050 /*
2051 * Register, register.
2052 */
2053 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2054 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2055 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2056 IEM_MC_BEGIN(2, 0);
2057 IEM_MC_ARG(uint64_t *, pDst, 0);
2058 IEM_MC_ARG(uint32_t const *, pSrc, 1);
2059 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2060 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2061 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2062 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2063 IEM_MC_ADVANCE_RIP();
2064 IEM_MC_END();
2065 }
2066 else
2067 {
2068 /*
2069 * Register, memory.
2070 */
2071 IEM_MC_BEGIN(2, 2);
2072 IEM_MC_ARG(uint64_t *, pDst, 0);
2073 IEM_MC_LOCAL(uint32_t, uSrc);
2074 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
2075 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2076
2077 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2078 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2079 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2080 IEM_MC_FETCH_MEM_U32(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
2081
2082 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2083 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2084
2085 IEM_MC_ADVANCE_RIP();
2086 IEM_MC_END();
2087 }
2088 return VINF_SUCCESS;
2089
2090 default:
2091 return IEMOP_RAISE_INVALID_OPCODE();
2092 }
2093}
2094
2095
/** Opcode 0x0f 0x60. Interleaves the low-order bytes of the operands. */
FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpcklbw");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
}
2102
2103
/** Opcode 0x0f 0x61. Interleaves the low-order words of the operands. */
FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpcklwd"); /** @todo AMD mark the MMX version as 3DNow!. Intel says MMX CPUID req. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
}
2110
2111
/** Opcode 0x0f 0x62. Interleaves the low-order dwords of the operands. */
FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckldq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
}
2118
2119
/** Opcode 0x0f 0x63 - packsswb (not implemented). */
FNIEMOP_STUB(iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq);
/** Opcode 0x0f 0x64 - pcmpgtb (not implemented). */
FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq);
/** Opcode 0x0f 0x65 - pcmpgtw (not implemented). */
FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq);
/** Opcode 0x0f 0x66 - pcmpgtd (not implemented). */
FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq);
/** Opcode 0x0f 0x67 - packuswb (not implemented). */
FNIEMOP_STUB(iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq);
2130
2131
2132/**
2133 * Common worker for SSE2 and MMX instructions on the forms:
2134 * pxxxx xmm1, xmm2/mem128
2135 * pxxxx mm1, mm2/mem64
2136 *
2137 * The 2nd operand is the second half of a register, which in the memory case
2138 * means a 64-bit memory access for MMX, and for MMX a 128-bit aligned access
2139 * where it may read the full 128 bits or only the upper 64 bits.
2140 *
2141 * Exceptions type 4.
2142 */
2143FNIEMOP_DEF_1(iemOpCommonMmxSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
2144{
2145 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2146 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2147 {
2148 case IEM_OP_PRF_SIZE_OP: /* SSE */
2149 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2150 {
2151 /*
2152 * Register, register.
2153 */
2154 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2155 IEM_MC_BEGIN(2, 0);
2156 IEM_MC_ARG(uint128_t *, pDst, 0);
2157 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2158 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2159 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
2160 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
2161 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2162 IEM_MC_ADVANCE_RIP();
2163 IEM_MC_END();
2164 }
2165 else
2166 {
2167 /*
2168 * Register, memory.
2169 */
2170 IEM_MC_BEGIN(2, 2);
2171 IEM_MC_ARG(uint128_t *, pDst, 0);
2172 IEM_MC_LOCAL(uint128_t, uSrc);
2173 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2174 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2175
2176 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2177 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2178 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2179 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc); /* Most CPUs probably only right high qword */
2180
2181 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
2182 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2183
2184 IEM_MC_ADVANCE_RIP();
2185 IEM_MC_END();
2186 }
2187 return VINF_SUCCESS;
2188
2189 case 0: /* MMX */
2190 if (!pImpl->pfnU64)
2191 return IEMOP_RAISE_INVALID_OPCODE();
2192 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2193 {
2194 /*
2195 * Register, register.
2196 */
2197 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2198 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2199 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2200 IEM_MC_BEGIN(2, 0);
2201 IEM_MC_ARG(uint64_t *, pDst, 0);
2202 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2203 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2204 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2205 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2206 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2207 IEM_MC_ADVANCE_RIP();
2208 IEM_MC_END();
2209 }
2210 else
2211 {
2212 /*
2213 * Register, memory.
2214 */
2215 IEM_MC_BEGIN(2, 2);
2216 IEM_MC_ARG(uint64_t *, pDst, 0);
2217 IEM_MC_LOCAL(uint64_t, uSrc);
2218 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2219 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2220
2221 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2222 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2223 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2224 IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
2225
2226 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2227 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2228
2229 IEM_MC_ADVANCE_RIP();
2230 IEM_MC_END();
2231 }
2232 return VINF_SUCCESS;
2233
2234 default:
2235 return IEMOP_RAISE_INVALID_OPCODE();
2236 }
2237}
2238
2239
/** Opcode 0x0f 0x68. Interleaves the high-order bytes of the operands. */
FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhbw");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
}
2246
2247
/** Opcode 0x0f 0x69. Interleaves the high-order words of the operands. */
FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhwd");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
}
2254
2255
/** Opcode 0x0f 0x6a. Interleaves the high-order dwords of the operands. */
FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhdq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
}
2262
/** Opcode 0x0f 0x6b - packssdw (not implemented).
 * NOTE(review): the identifier says "packssdq"; it should read "packssdw" but
 * cannot be renamed here since the opcode table references it. */
FNIEMOP_STUB(iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq);
2265
2266
/** Opcode 0x0f 0x6c. Interleaves the low-order qwords; SSE2 only (the common
 *  worker rejects the no-prefix MMX encoding via the NULL pfnU64). */
FNIEMOP_DEF(iemOp_punpcklqdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpcklqdq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
}
2273
2274
/** Opcode 0x0f 0x6d. Interleaves the high-order qwords; SSE2 only (the common
 *  worker rejects the no-prefix MMX encoding via the NULL pfnU64). */
FNIEMOP_DEF(iemOp_punpckhqdq_Vdq_Wdq)
{
    IEMOP_MNEMONIC("punpckhqdq");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
}
2281
2282
/** Opcode 0x0f 0x6e. movd/movq from a general register or memory into an
 *  XMM (66 prefix) or MMX (no prefix) register; REX.W selects the 64-bit
 *  (movq) operand size. */
FNIEMOP_DEF(iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            IEMOP_MNEMONIC("movd/q Wd/q,Ed/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* XMM, greg*/
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    /* movq: full 64-bit general register, zero extended to 128 bits. */
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                    IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
                }
                else
                {
                    /* movd: 32-bit general register, zero extended to 128 bits. */
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                    IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* XMM, [mem] */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq/d Pd/q,Ed/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* MMX, greg */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                else
                    IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* MMX, [mem] */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                    IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2383
2384
/** Opcode 0x0f 0x6f. movq (MMX), movdqa (66 prefix) and movdqu (F3 prefix);
 *  the only difference between the two SSE forms is the alignment check on
 *  the memory operand. */
FNIEMOP_DEF(iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq)
{
    bool fAligned = false;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
            fAligned = true;
            /* fall thru */
        case IEM_OP_PRF_REPZ: /* SSE unaligned */
            if (fAligned)
                IEMOP_MNEMONIC("movdqa Vdq,Wdq");
            else
                IEMOP_MNEMONIC("movdqu Vdq,Wdq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_XREG_U128(u128Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u128Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                if (fAligned)
                    IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                else
                    IEM_MC_FETCH_MEM_U128(u128Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u128Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq Pq,Qq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2478
2479
/** Opcode 0x0f 0x70. The immediate here is evil!
 * "Evil" because the shuffle-control immediate follows the ModR/M bytes, so in
 * the memory forms it must be fetched *after* the effective address has been
 * decoded; see the IEM_OPCODE_GET_NEXT_U8(&bEvil) placement below. */
FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
        case IEM_OP_PRF_REPNZ: /* SSE */
        case IEM_OP_PRF_REPZ: /* SSE */
        {
            /* All three SSE variants share the same decoding; only the worker differs. */
            PFNIEMAIMPLMEDIAPSHUF pfnAImpl;
            switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
            {
                case IEM_OP_PRF_SIZE_OP:
                    IEMOP_MNEMONIC("pshufd Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshufd;
                    break;
                case IEM_OP_PRF_REPNZ:
                    IEMOP_MNEMONIC("pshuflw Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshuflw;
                    break;
                case IEM_OP_PRF_REPZ:
                    IEMOP_MNEMONIC("pshufhw Vdq,Wdq,Ib");
                    pfnAImpl = iemAImpl_pshufhw;
                    break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_ARG(uint128_t const *, pSrc, 1);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint128_t *, pDst, 0);
                IEM_MC_LOCAL(uint128_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                /* The immediate comes after the displacement, hence the late fetch. */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();

                IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_SSE_AIMPL_3(pfnAImpl, pDst, pSrc, bEvilArg);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case 0: /* MMX Extension */
            IEMOP_MNEMONIC("pshufw Pq,Qq,Ib");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_ARG(uint64_t const *, pSrc, 1);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
                IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pDst, 0);
                IEM_MC_LOCAL(uint64_t, uSrc);
                IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                /* The immediate comes after the displacement, hence the late fetch. */
                uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil);
                IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();

                IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
2604
2605
/** Opcode 0x0f 0x71 11/2 - psrlw Nq,Ib (MMX, not implemented). */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/2 - psrlw Udq,Ib (SSE2, not implemented). */
FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/4 - psraw Nq,Ib (MMX, not implemented). */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/4 - psraw Udq,Ib (SSE2, not implemented). */
FNIEMOP_STUB_1(iemOp_Grp12_psraw_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x71 11/6 - psllw Nq,Ib (MMX, not implemented). */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x71 11/6 - psllw Udq,Ib (SSE2, not implemented). */
FNIEMOP_STUB_1(iemOp_Grp12_psllw_Udq_Ib, uint8_t, bRm);
2623
2624
2625/** Opcode 0x0f 0x71. */
2626FNIEMOP_DEF(iemOp_Grp12)
2627{
2628 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2629 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2630 return IEMOP_RAISE_INVALID_OPCODE();
2631 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2632 {
2633 case 0: case 1: case 3: case 5: case 7:
2634 return IEMOP_RAISE_INVALID_OPCODE();
2635 case 2:
2636 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2637 {
2638 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Nq_Ib, bRm);
2639 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psrlw_Udq_Ib, bRm);
2640 default: return IEMOP_RAISE_INVALID_OPCODE();
2641 }
2642 case 4:
2643 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2644 {
2645 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Nq_Ib, bRm);
2646 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psraw_Udq_Ib, bRm);
2647 default: return IEMOP_RAISE_INVALID_OPCODE();
2648 }
2649 case 6:
2650 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2651 {
2652 case 0: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Nq_Ib, bRm);
2653 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp12_psllw_Udq_Ib, bRm);
2654 default: return IEMOP_RAISE_INVALID_OPCODE();
2655 }
2656 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2657 }
2658}
2659
2660
/** Opcode 0x0f 0x72 11/2 - psrld Nq,Ib (MMX, not implemented). */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/2 - psrld Udq,Ib (SSE2, not implemented). */
FNIEMOP_STUB_1(iemOp_Grp13_psrld_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/4 - psrad Nq,Ib (MMX, not implemented). */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/4 - psrad Udq,Ib (SSE2, not implemented). */
FNIEMOP_STUB_1(iemOp_Grp13_psrad_Udq_Ib, uint8_t, bRm);

/** Opcode 0x0f 0x72 11/6 - pslld Nq,Ib (MMX, not implemented). */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x72 11/6 - pslld Udq,Ib (SSE2, not implemented). */
FNIEMOP_STUB_1(iemOp_Grp13_pslld_Udq_Ib, uint8_t, bRm);
2678
2679
2680/** Opcode 0x0f 0x72. */
2681FNIEMOP_DEF(iemOp_Grp13)
2682{
2683 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2684 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2685 return IEMOP_RAISE_INVALID_OPCODE();
2686 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2687 {
2688 case 0: case 1: case 3: case 5: case 7:
2689 return IEMOP_RAISE_INVALID_OPCODE();
2690 case 2:
2691 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2692 {
2693 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Nq_Ib, bRm);
2694 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrld_Udq_Ib, bRm);
2695 default: return IEMOP_RAISE_INVALID_OPCODE();
2696 }
2697 case 4:
2698 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2699 {
2700 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Nq_Ib, bRm);
2701 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_psrad_Udq_Ib, bRm);
2702 default: return IEMOP_RAISE_INVALID_OPCODE();
2703 }
2704 case 6:
2705 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2706 {
2707 case 0: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Nq_Ib, bRm);
2708 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp13_pslld_Udq_Ib, bRm);
2709 default: return IEMOP_RAISE_INVALID_OPCODE();
2710 }
2711 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2712 }
2713}
2714
2715
/** Opcode 0x0f 0x73 11/2 - psrlq Nq,Ib (MMX, not implemented). */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/2 - psrlq Udq,Ib (SSE2, not implemented). */
FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/3 - psrldq Udq,Ib (SSE2 only, not implemented). */
FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Udq_Ib, uint8_t, bRm); //NEXT

/** Opcode 0x0f 0x73 11/6 - psllq Nq,Ib (MMX, not implemented). */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/6 - psllq Udq,Ib (SSE2, not implemented). */
FNIEMOP_STUB_1(iemOp_Grp14_psllq_Udq_Ib, uint8_t, bRm);

/** Opcode 0x66 0x0f 0x73 11/7 - pslldq Udq,Ib (SSE2 only, not implemented). */
FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Udq_Ib, uint8_t, bRm); //NEXT
2733
2734
2735/** Opcode 0x0f 0x73. */
2736FNIEMOP_DEF(iemOp_Grp14)
2737{
2738 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2739 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
2740 return IEMOP_RAISE_INVALID_OPCODE();
2741 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2742 {
2743 case 0: case 1: case 4: case 5:
2744 return IEMOP_RAISE_INVALID_OPCODE();
2745 case 2:
2746 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2747 {
2748 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Nq_Ib, bRm);
2749 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrlq_Udq_Ib, bRm);
2750 default: return IEMOP_RAISE_INVALID_OPCODE();
2751 }
2752 case 3:
2753 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2754 {
2755 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psrldq_Udq_Ib, bRm);
2756 default: return IEMOP_RAISE_INVALID_OPCODE();
2757 }
2758 case 6:
2759 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2760 {
2761 case 0: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Nq_Ib, bRm);
2762 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_psllq_Udq_Ib, bRm);
2763 default: return IEMOP_RAISE_INVALID_OPCODE();
2764 }
2765 case 7:
2766 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2767 {
2768 case IEM_OP_PRF_SIZE_OP: return FNIEMOP_CALL_1(iemOp_Grp14_pslldq_Udq_Ib, bRm);
2769 default: return IEMOP_RAISE_INVALID_OPCODE();
2770 }
2771 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2772 }
2773}
2774
2775
2776/**
2777 * Common worker for SSE2 and MMX instructions on the forms:
2778 * pxxx mm1, mm2/mem64
2779 * pxxx xmm1, xmm2/mem128
2780 *
2781 * Proper alignment of the 128-bit operand is enforced.
2782 * Exceptions type 4. SSE2 and MMX cpuid checks.
2783 */
2784FNIEMOP_DEF_1(iemOpCommonMmxSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl)
2785{
2786 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2787 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2788 {
2789 case IEM_OP_PRF_SIZE_OP: /* SSE */
2790 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2791 {
2792 /*
2793 * Register, register.
2794 */
2795 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2796 IEM_MC_BEGIN(2, 0);
2797 IEM_MC_ARG(uint128_t *, pDst, 0);
2798 IEM_MC_ARG(uint128_t const *, pSrc, 1);
2799 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2800 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
2801 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
2802 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2803 IEM_MC_ADVANCE_RIP();
2804 IEM_MC_END();
2805 }
2806 else
2807 {
2808 /*
2809 * Register, memory.
2810 */
2811 IEM_MC_BEGIN(2, 2);
2812 IEM_MC_ARG(uint128_t *, pDst, 0);
2813 IEM_MC_LOCAL(uint128_t, uSrc);
2814 IEM_MC_ARG_LOCAL_REF(uint128_t const *, pSrc, uSrc, 1);
2815 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2816
2817 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2818 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2819 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
2820 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
2821
2822 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
2823 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
2824
2825 IEM_MC_ADVANCE_RIP();
2826 IEM_MC_END();
2827 }
2828 return VINF_SUCCESS;
2829
2830 case 0: /* MMX */
2831 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2832 {
2833 /*
2834 * Register, register.
2835 */
2836 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
2837 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
2838 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2839 IEM_MC_BEGIN(2, 0);
2840 IEM_MC_ARG(uint64_t *, pDst, 0);
2841 IEM_MC_ARG(uint64_t const *, pSrc, 1);
2842 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2843 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2844 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
2845 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2846 IEM_MC_ADVANCE_RIP();
2847 IEM_MC_END();
2848 }
2849 else
2850 {
2851 /*
2852 * Register, memory.
2853 */
2854 IEM_MC_BEGIN(2, 2);
2855 IEM_MC_ARG(uint64_t *, pDst, 0);
2856 IEM_MC_LOCAL(uint64_t, uSrc);
2857 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1);
2858 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2859
2860 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2861 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2862 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
2863 IEM_MC_FETCH_MEM_U64(uSrc, pIemCpu->iEffSeg, GCPtrEffSrc);
2864
2865 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2866 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
2867
2868 IEM_MC_ADVANCE_RIP();
2869 IEM_MC_END();
2870 }
2871 return VINF_SUCCESS;
2872
2873 default:
2874 return IEMOP_RAISE_INVALID_OPCODE();
2875 }
2876}
2877
2878
/** Opcode 0x0f 0x74. Packed compare-for-equality, byte granularity. */
FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqb");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
}
2885
2886
/** Opcode 0x0f 0x75. Packed compare-for-equality, word granularity. */
FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqw");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
}
2893
2894
/** Opcode 0x0f 0x76. Packed compare-for-equality, dword granularity.
 * NOTE(review): the identifier says "pcmped"; should read "pcmpeqd" but cannot
 * be renamed here since the opcode table references it. */
FNIEMOP_DEF(iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pcmpeqd");
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
}
2901
2902
/** Opcode 0x0f 0x77 - emms (not implemented). */
FNIEMOP_STUB(iemOp_emms);
/** Opcode 0x0f 0x78 - vmread / AMD group 17; decoded as invalid (UD stub). */
FNIEMOP_UD_STUB(iemOp_vmread_AmdGrp17);
/** Opcode 0x0f 0x79 - vmwrite; decoded as invalid (UD stub). */
FNIEMOP_UD_STUB(iemOp_vmwrite);
/** Opcode 0x0f 0x7c - haddpd/haddps (not implemented). */
FNIEMOP_STUB(iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps);
/** Opcode 0x0f 0x7d - hsubpd/hsubps (not implemented). */
FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps);
2913
2914
/** Opcode 0x0f 0x7e.
 * Prefix-dispatched: with 0x66 it is SSE2 movd/movq Ed/Eq,Vd/Vq (greg or
 * memory from XMM); with no prefix it is MMX movd/movq Ed/Eq,Pd/Pq.  REX.W
 * selects the 64-bit (movq) form in both cases.
 * NOTE(review): the F3 form (movq Vq,Wq) named in the function identifier is
 * not handled by the switch and lands in the invalid-opcode default case -
 * presumably unimplemented; confirm before relying on it. */
FNIEMOP_DEF(iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE */
            IEMOP_MNEMONIC("movd/q Ed/q,Wd/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* greg, XMM */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    /* REX.W: store the low qword of the XMM register. */
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
                }
                else
                {
                    /* No REX.W: store the low dword of the XMM register. */
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* [mem], XMM */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                    IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffSrc, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq/d Ed/q,Pd/q");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* greg, MMX */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    /* REX.W: full 64-bit MMX register. */
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Tmp);
                }
                else
                {
                    /* No REX.W: low dword of the MMX register. */
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* [mem], MMX */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                if (pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_REX_W)
                {
                    IEM_MC_LOCAL(uint64_t, u64Tmp);
                    IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);
                }
                else
                {
                    IEM_MC_LOCAL(uint32_t, u32Tmp);
                    IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                    IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffSrc, u32Tmp);
                }
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3021
3022
/** Opcode 0x0f 0x7f.
 * Prefix-dispatched store: 0x66 = movdqa Wdq,Vdq (aligned), F3 = movdqu
 * Wdq,Vdq (unaligned), no prefix = MMX movq Qq,Pq. */
FNIEMOP_DEF(iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    bool fAligned = false;
    switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        case IEM_OP_PRF_SIZE_OP: /* SSE aligned */
            fAligned = true;
            /* fall thru - the aligned and unaligned forms share all code but the store. */
        case IEM_OP_PRF_REPZ: /* SSE unaligned */
            if (fAligned)
                IEMOP_MNEMONIC("movdqa Wdq,Vdq");
            else
                IEMOP_MNEMONIC("movdqu Wdq,Vdq");
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_XREG_U128((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u128Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint128_t, u128Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
                IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                /* Only movdqa enforces 16-byte alignment on the store. */
                if (fAligned)
                    IEM_MC_STORE_MEM_U128_ALIGN_SSE(pIemCpu->iEffSeg, GCPtrEffSrc, u128Tmp);
                else
                    IEM_MC_STORE_MEM_U128(pIemCpu->iEffSeg, GCPtrEffSrc, u128Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case 0: /* MMX */
            IEMOP_MNEMONIC("movq Qq,Pq");

            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /*
                 * Register, register.
                 */
                /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */
                /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /*
                 * Register, memory.
                 */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
                IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffSrc, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        default:
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
3117
3118
3119
/** Opcode 0x0f 0x80. Jump near if overflow (OF=1). */
FNIEMOP_DEF(iemOp_jo_Jv)
{
    IEMOP_MNEMONIC("jo Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* 64-bit mode defaults to 64-bit operand size. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3153
3154
/** Opcode 0x0f 0x81. Jump near if not overflow (OF=0). */
FNIEMOP_DEF(iemOp_jno_Jv)
{
    IEMOP_MNEMONIC("jno Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* 64-bit mode defaults to 64-bit operand size. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement.  Branch is in the ELSE arm
           since the tested condition (OF set) is the inverse. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3188
3189
/** Opcode 0x0f 0x82. Jump near if carry (CF=1); aka jb/jnae. */
FNIEMOP_DEF(iemOp_jc_Jv)
{
    IEMOP_MNEMONIC("jc/jb/jnae Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* 64-bit mode defaults to 64-bit operand size. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3223
3224
/** Opcode 0x0f 0x83. Jump near if not carry (CF=0); aka jnb/jae. */
FNIEMOP_DEF(iemOp_jnc_Jv)
{
    IEMOP_MNEMONIC("jnc/jnb/jae Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* 64-bit mode defaults to 64-bit operand size. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement; branch taken in the ELSE arm. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3258
3259
/** Opcode 0x0f 0x84. Jump near if equal/zero (ZF=1). */
FNIEMOP_DEF(iemOp_je_Jv)
{
    IEMOP_MNEMONIC("je/jz Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* 64-bit mode defaults to 64-bit operand size. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3293
3294
/** Opcode 0x0f 0x85. Jump near if not equal/not zero (ZF=0). */
FNIEMOP_DEF(iemOp_jne_Jv)
{
    IEMOP_MNEMONIC("jne/jnz Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* 64-bit mode defaults to 64-bit operand size. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement; branch taken in the ELSE arm. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3328
3329
/** Opcode 0x0f 0x86. Jump near if below or equal (CF=1 or ZF=1). */
FNIEMOP_DEF(iemOp_jbe_Jv)
{
    IEMOP_MNEMONIC("jbe/jna Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* 64-bit mode defaults to 64-bit operand size. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3363
3364
/** Opcode 0x0f 0x87. Jump near if above (CF=0 and ZF=0). */
FNIEMOP_DEF(iemOp_jnbe_Jv)
{
    IEMOP_MNEMONIC("jnbe/ja Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* 64-bit mode defaults to 64-bit operand size. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement; branch taken in the ELSE arm. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3398
3399
/** Opcode 0x0f 0x88. Jump near if sign (SF=1). */
FNIEMOP_DEF(iemOp_js_Jv)
{
    IEMOP_MNEMONIC("js Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* 64-bit mode defaults to 64-bit operand size. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3433
3434
/** Opcode 0x0f 0x89. Jump near if not sign (SF=0). */
FNIEMOP_DEF(iemOp_jns_Jv)
{
    IEMOP_MNEMONIC("jns Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* 64-bit mode defaults to 64-bit operand size. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement; branch taken in the ELSE arm. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3468
3469
/** Opcode 0x0f 0x8a. Jump near if parity (PF=1). */
FNIEMOP_DEF(iemOp_jp_Jv)
{
    IEMOP_MNEMONIC("jp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* 64-bit mode defaults to 64-bit operand size. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3503
3504
3505/** Opcode 0x0f 0x8b. */
3506FNIEMOP_DEF(iemOp_jnp_Jv)
3507{
3508 IEMOP_MNEMONIC("jo Jv");
3509 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3510 if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
3511 {
3512 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
3513 IEMOP_HLP_NO_LOCK_PREFIX();
3514
3515 IEM_MC_BEGIN(0, 0);
3516 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3517 IEM_MC_ADVANCE_RIP();
3518 } IEM_MC_ELSE() {
3519 IEM_MC_REL_JMP_S16(i16Imm);
3520 } IEM_MC_ENDIF();
3521 IEM_MC_END();
3522 }
3523 else
3524 {
3525 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
3526 IEMOP_HLP_NO_LOCK_PREFIX();
3527
3528 IEM_MC_BEGIN(0, 0);
3529 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3530 IEM_MC_ADVANCE_RIP();
3531 } IEM_MC_ELSE() {
3532 IEM_MC_REL_JMP_S32(i32Imm);
3533 } IEM_MC_ENDIF();
3534 IEM_MC_END();
3535 }
3536 return VINF_SUCCESS;
3537}
3538
3539
/** Opcode 0x0f 0x8c. Jump near if less, signed (SF != OF). */
FNIEMOP_DEF(iemOp_jl_Jv)
{
    IEMOP_MNEMONIC("jl/jnge Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* 64-bit mode defaults to 64-bit operand size. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3573
3574
/** Opcode 0x0f 0x8d. Jump near if greater or equal, signed (SF == OF). */
FNIEMOP_DEF(iemOp_jnl_Jv)
{
    IEMOP_MNEMONIC("jnl/jge Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* 64-bit mode defaults to 64-bit operand size. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement; branch taken in the ELSE arm. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3608
3609
/** Opcode 0x0f 0x8e. Jump near if less or equal, signed (ZF=1 or SF != OF). */
FNIEMOP_DEF(iemOp_jle_Jv)
{
    IEMOP_MNEMONIC("jle/jng Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* 64-bit mode defaults to 64-bit operand size. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ELSE() {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3643
3644
/** Opcode 0x0f 0x8f. Jump near if greater, signed (ZF=0 and SF == OF). */
FNIEMOP_DEF(iemOp_jnle_Jv)
{
    IEMOP_MNEMONIC("jnle/jg Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* 64-bit mode defaults to 64-bit operand size. */
    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
    {
        /* 16-bit operand size: rel16 displacement; branch taken in the ELSE arm. */
        int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S16(i16Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    else
    {
        /* 32-bit and 64-bit operand size: rel32 displacement. */
        int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_REL_JMP_S32(i32Imm);
        } IEM_MC_ENDIF();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3678
3679
/** Opcode 0x0f 0x90. Set byte on overflow: Eb = (OF=1) ? 1 : 0. */
FNIEMOP_DEF(iemOp_seto_Eb)
{
    IEMOP_MNEMONIC("seto Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3718
3719
/** Opcode 0x0f 0x91. Set byte on no overflow: Eb = (OF=0) ? 1 : 0. */
FNIEMOP_DEF(iemOp_setno_Eb)
{
    IEMOP_MNEMONIC("setno Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target - inverted condition, so 0 when OF is set. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3758
3759
/** Opcode 0x0f 0x92. Set byte on carry (setc/setb/setnae): Eb = (CF=1) ? 1 : 0. */
FNIEMOP_DEF(iemOp_setc_Eb)
{
    IEMOP_MNEMONIC("setc Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3798
3799
/** Opcode 0x0f 0x93. Set byte on no carry (setnc/setnb/setae): Eb = (CF=0) ? 1 : 0. */
FNIEMOP_DEF(iemOp_setnc_Eb)
{
    IEMOP_MNEMONIC("setnc Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target - inverted condition, so 0 when CF is set. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3838
3839
/** Opcode 0x0f 0x94. Set byte on equal/zero: Eb = (ZF=1) ? 1 : 0. */
FNIEMOP_DEF(iemOp_sete_Eb)
{
    IEMOP_MNEMONIC("sete Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3878
3879
/** Opcode 0x0f 0x95. Set byte on not equal/not zero: Eb = (ZF=0) ? 1 : 0. */
FNIEMOP_DEF(iemOp_setne_Eb)
{
    IEMOP_MNEMONIC("setne Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target - inverted condition, so 0 when ZF is set. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3918
3919
/** Opcode 0x0f 0x96. Set byte on below or equal: Eb = (CF=1 or ZF=1) ? 1 : 0. */
FNIEMOP_DEF(iemOp_setbe_Eb)
{
    IEMOP_MNEMONIC("setbe Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3958
3959
/** Opcode 0x0f 0x97. Set byte on above (setnbe/seta): Eb = (CF=0 and ZF=0) ? 1 : 0. */
FNIEMOP_DEF(iemOp_setnbe_Eb)
{
    IEMOP_MNEMONIC("setnbe Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target - inverted condition, so 0 when CF or ZF is set. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3998
3999
/** Opcode 0x0f 0x98. Set byte on sign: Eb = (SF=1) ? 1 : 0. */
FNIEMOP_DEF(iemOp_sets_Eb)
{
    IEMOP_MNEMONIC("sets Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4038
4039
/** Opcode 0x0f 0x99. Set byte on no sign: Eb = (SF=0) ? 1 : 0. */
FNIEMOP_DEF(iemOp_setns_Eb)
{
    IEMOP_MNEMONIC("setns Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target - inverted condition, so 0 when SF is set. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4078
4079
4080/** Opcode 0x0f 0x9a. */
4081FNIEMOP_DEF(iemOp_setp_Eb)
4082{
4083 IEMOP_MNEMONIC("setnp Eb");
4084 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4085 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */
4086
4087 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
4088 * any way. AMD says it's "unused", whatever that means. We're
4089 * ignoring for now. */
4090 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4091 {
4092 /* register target */
4093 IEM_MC_BEGIN(0, 0);
4094 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4095 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
4096 } IEM_MC_ELSE() {
4097 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
4098 } IEM_MC_ENDIF();
4099 IEM_MC_ADVANCE_RIP();
4100 IEM_MC_END();
4101 }
4102 else
4103 {
4104 /* memory target */
4105 IEM_MC_BEGIN(0, 1);
4106 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4107 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4108 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
4109 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
4110 } IEM_MC_ELSE() {
4111 IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
4112 } IEM_MC_ENDIF();
4113 IEM_MC_ADVANCE_RIP();
4114 IEM_MC_END();
4115 }
4116 return VINF_SUCCESS;
4117}
4118
4119
/** Opcode 0x0f 0x9b.
 * SETNP r/m8: sets the byte to 1 if PF is clear, 0 if PF is set. */
FNIEMOP_DEF(iemOp_setnp_Eb)
{
    IEMOP_MNEMONIC("setnp Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target: condition inverted, so PF set -> 0, PF clear -> 1. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4158
4159
/** Opcode 0x0f 0x9c.
 * SETL r/m8: sets the byte to 1 if SF != OF (signed less), else 0. */
FNIEMOP_DEF(iemOp_setl_Eb)
{
    IEMOP_MNEMONIC("setl Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target: SF != OF -> 1, SF == OF -> 0. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4198
4199
/** Opcode 0x0f 0x9d.
 * SETNL r/m8: sets the byte to 1 if SF == OF (signed not-less), else 0. */
FNIEMOP_DEF(iemOp_setnl_Eb)
{
    IEMOP_MNEMONIC("setnl Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target: condition inverted, so SF != OF -> 0, SF == OF -> 1. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4238
4239
/** Opcode 0x0f 0x9e.
 * SETLE r/m8: sets the byte to 1 if ZF is set or SF != OF (signed less-or-equal), else 0. */
FNIEMOP_DEF(iemOp_setle_Eb)
{
    IEMOP_MNEMONIC("setle Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target: (ZF || SF != OF) -> 1, else 0. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4278
4279
/** Opcode 0x0f 0x9f.
 * SETNLE r/m8: sets the byte to 1 if ZF is clear and SF == OF (signed greater), else 0. */
FNIEMOP_DEF(iemOp_setnle_Eb)
{
    IEMOP_MNEMONIC("setnle Eb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

    /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in
     *        any way. AMD says it's "unused", whatever that means. We're
     *        ignoring for now. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target: condition inverted, so (ZF || SF != OF) -> 0, else 1. */
        IEM_MC_BEGIN(0, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 0);
        } IEM_MC_ELSE() {
            IEM_MC_STORE_MEM_U8_CONST(pIemCpu->iEffSeg, GCPtrEffDst, 1);
        } IEM_MC_ENDIF();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4318
4319
/**
 * Common 'push segment-register' helper.
 *
 * Pushes the segment selector in @a iReg at the current effective operand
 * size.  FS/GS pushes are allowed in 64-bit mode (default 64-bit operand
 * size); ES/CS/SS/DS (iReg < X86_SREG_FS) are rejected in 64-bit mode.
 *
 * @param   iReg    The segment register index (X86_SREG_XXX).
 */
FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (iReg < X86_SREG_FS)
        IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_SREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg);
            /* Note: dedicated SREG push variant, unlike the plain U16/U64 pushes. */
            IEM_MC_PUSH_U32_SREG(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
4362
4363
/** Opcode 0x0f 0xa0.
 * PUSH FS - defers to the common segment-register push helper.
 * (The lock-prefix check is repeated inside the helper; harmless.) */
FNIEMOP_DEF(iemOp_push_fs)
{
    IEMOP_MNEMONIC("push fs");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS);
}
4371
4372
/** Opcode 0x0f 0xa1.
 * POP FS - deferred to the C implementation with the effective operand size. */
FNIEMOP_DEF(iemOp_pop_fs)
{
    IEMOP_MNEMONIC("pop fs");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pIemCpu->enmEffOpSize);
}
4380
4381
/** Opcode 0x0f 0xa2.
 * CPUID - deferred entirely to the C implementation. */
FNIEMOP_DEF(iemOp_cpuid)
{
    IEMOP_MNEMONIC("cpuid");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid);
}
4389
4390
4391/**
4392 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and
4393 * iemOp_bts_Ev_Gv.
4394 */
4395FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl)
4396{
4397 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4398 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
4399
4400 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4401 {
4402 /* register destination. */
4403 IEMOP_HLP_NO_LOCK_PREFIX();
4404 switch (pIemCpu->enmEffOpSize)
4405 {
4406 case IEMMODE_16BIT:
4407 IEM_MC_BEGIN(3, 0);
4408 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4409 IEM_MC_ARG(uint16_t, u16Src, 1);
4410 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4411
4412 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4413 IEM_MC_AND_LOCAL_U16(u16Src, 0xf);
4414 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4415 IEM_MC_REF_EFLAGS(pEFlags);
4416 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4417
4418 IEM_MC_ADVANCE_RIP();
4419 IEM_MC_END();
4420 return VINF_SUCCESS;
4421
4422 case IEMMODE_32BIT:
4423 IEM_MC_BEGIN(3, 0);
4424 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4425 IEM_MC_ARG(uint32_t, u32Src, 1);
4426 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4427
4428 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4429 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f);
4430 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4431 IEM_MC_REF_EFLAGS(pEFlags);
4432 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4433
4434 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
4435 IEM_MC_ADVANCE_RIP();
4436 IEM_MC_END();
4437 return VINF_SUCCESS;
4438
4439 case IEMMODE_64BIT:
4440 IEM_MC_BEGIN(3, 0);
4441 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4442 IEM_MC_ARG(uint64_t, u64Src, 1);
4443 IEM_MC_ARG(uint32_t *, pEFlags, 2);
4444
4445 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4446 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f);
4447 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
4448 IEM_MC_REF_EFLAGS(pEFlags);
4449 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4450
4451 IEM_MC_ADVANCE_RIP();
4452 IEM_MC_END();
4453 return VINF_SUCCESS;
4454
4455 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4456 }
4457 }
4458 else
4459 {
4460 /* memory destination. */
4461
4462 uint32_t fAccess;
4463 if (pImpl->pfnLockedU16)
4464 fAccess = IEM_ACCESS_DATA_RW;
4465 else /* BT */
4466 {
4467 IEMOP_HLP_NO_LOCK_PREFIX();
4468 fAccess = IEM_ACCESS_DATA_R;
4469 }
4470
4471 NOREF(fAccess);
4472
4473 /** @todo test negative bit offsets! */
4474 switch (pIemCpu->enmEffOpSize)
4475 {
4476 case IEMMODE_16BIT:
4477 IEM_MC_BEGIN(3, 2);
4478 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
4479 IEM_MC_ARG(uint16_t, u16Src, 1);
4480 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4481 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4482 IEM_MC_LOCAL(int16_t, i16AddrAdj);
4483
4484 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4485 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4486 IEM_MC_ASSIGN(i16AddrAdj, u16Src);
4487 IEM_MC_AND_ARG_U16(u16Src, 0x0f);
4488 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4);
4489 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 1);
4490 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj);
4491 IEM_MC_FETCH_EFLAGS(EFlags);
4492
4493 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4494 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4495 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
4496 else
4497 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
4498 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
4499
4500 IEM_MC_COMMIT_EFLAGS(EFlags);
4501 IEM_MC_ADVANCE_RIP();
4502 IEM_MC_END();
4503 return VINF_SUCCESS;
4504
4505 case IEMMODE_32BIT:
4506 IEM_MC_BEGIN(3, 2);
4507 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
4508 IEM_MC_ARG(uint32_t, u32Src, 1);
4509 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4510 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4511 IEM_MC_LOCAL(int32_t, i32AddrAdj);
4512
4513 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4514 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4515 IEM_MC_ASSIGN(i32AddrAdj, u32Src);
4516 IEM_MC_AND_ARG_U32(u32Src, 0x1f);
4517 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5);
4518 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2);
4519 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj);
4520 IEM_MC_FETCH_EFLAGS(EFlags);
4521
4522 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4523 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4524 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
4525 else
4526 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
4527 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
4528
4529 IEM_MC_COMMIT_EFLAGS(EFlags);
4530 IEM_MC_ADVANCE_RIP();
4531 IEM_MC_END();
4532 return VINF_SUCCESS;
4533
4534 case IEMMODE_64BIT:
4535 IEM_MC_BEGIN(3, 2);
4536 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
4537 IEM_MC_ARG(uint64_t, u64Src, 1);
4538 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
4539 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4540 IEM_MC_LOCAL(int64_t, i64AddrAdj);
4541
4542 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4543 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
4544 IEM_MC_ASSIGN(i64AddrAdj, u64Src);
4545 IEM_MC_AND_ARG_U64(u64Src, 0x3f);
4546 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6);
4547 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3);
4548 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj);
4549 IEM_MC_FETCH_EFLAGS(EFlags);
4550
4551 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
4552 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
4553 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
4554 else
4555 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
4556 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
4557
4558 IEM_MC_COMMIT_EFLAGS(EFlags);
4559 IEM_MC_ADVANCE_RIP();
4560 IEM_MC_END();
4561 return VINF_SUCCESS;
4562
4563 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4564 }
4565 }
4566}
4567
4568
4569/** Opcode 0x0f 0xa3. */
4570FNIEMOP_DEF(iemOp_bt_Ev_Gv)
4571{
4572 IEMOP_MNEMONIC("bt Gv,Gv");
4573 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt);
4574}
4575
4576
/**
 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib.
 *
 * Decodes SHLD/SHRD Ev,Gv,Ib: destination is r/m, the fill bits come from
 * the 'reg' operand, and the shift count is an immediate byte following the
 * ModR/M (and any displacement).  AF/OF are declared undefined for
 * verification purposes.
 *
 * @param   pImpl   Double-shift implementation table (shld or shrd).
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: the immediate follows the ModR/M directly. */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                /* 32-bit GPR writes zero the upper half in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=1: tell the effective-address decoder an immediate byte
                   follows, then fetch that immediate. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4721
4722
/**
 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL.
 *
 * Like the Ib variant, but the shift count is read from CL at execution
 * time instead of an immediate byte.  AF/OF are declared undefined for
 * verification purposes.
 *
 * @param   pImpl   Double-shift implementation table (shld or shrd).
 */
FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count from CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                /* 32-bit GPR writes zero the upper half in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo too early? */

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=0: no immediate follows, unlike the Ib variant. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint8_t, cShiftArg, 2);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4866
4867
4868
/** Opcode 0x0f 0xa4.
 * SHLD Ev,Gv,Ib - left double shift with immediate count. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC("shld Ev,Gv,Ib");
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld);
}
4875
4876
/** Opcode 0x0f 0xa7.
 * SHLD Ev,Gv,CL - left double shift with count from CL. */
FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL)
{
    IEMOP_MNEMONIC("shld Ev,Gv,CL");
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld);
}
4883
4884
/** Opcode 0x0f 0xa8.
 * PUSH GS - defers to the common segment-register push helper. */
FNIEMOP_DEF(iemOp_push_gs)
{
    IEMOP_MNEMONIC("push gs");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS);
}
4892
4893
/** Opcode 0x0f 0xa9.
 * POP GS - deferred to the C implementation with the effective operand size. */
FNIEMOP_DEF(iemOp_pop_gs)
{
    IEMOP_MNEMONIC("pop gs");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pIemCpu->enmEffOpSize);
}
4901
4902
/** Opcode 0x0f 0xaa.
 * RSM - not implemented; declared via the stub macro. */
FNIEMOP_STUB(iemOp_rsm);
4905
4906
/** Opcode 0x0f 0xab.
 * BTS Ev,Gv - bit test and set (lockable, read-modify-write). */
FNIEMOP_DEF(iemOp_bts_Ev_Gv)
{
    IEMOP_MNEMONIC("bts Ev,Gv");
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts);
}
4913
4914
/** Opcode 0x0f 0xac.
 * SHRD Ev,Gv,Ib - right double shift with immediate count. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib)
{
    IEMOP_MNEMONIC("shrd Ev,Gv,Ib");
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd);
}
4921
4922
/** Opcode 0x0f 0xad.
 * SHRD Ev,Gv,CL - right double shift with count from CL. */
FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL)
{
    IEMOP_MNEMONIC("shrd Ev,Gv,CL");
    return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd);
}
4929
4930
/** Opcode 0x0f 0xae mem/0.
 * FXSAVE m512 - raises \#UD when the guest CPU lacks FXSR; otherwise defers
 * to the C implementation with segment, effective address and operand size. */
FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fxsave m512");
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fFxSaveRstor)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_ARG(uint8_t, iEffSeg, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize);
    IEM_MC_END();
    return VINF_SUCCESS;
}
4948}
4949
4950
4951/** Opcode 0x0f 0xae mem/1. */
4952FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm)
4953{
4954 IEMOP_MNEMONIC("fxrstor m512");
4955 if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fFxSaveRstor)
4956 return IEMOP_RAISE_INVALID_OPCODE();
4957
4958 IEM_MC_BEGIN(3, 1);
4959 IEM_MC_ARG(uint8_t, iEffSeg, 0);
4960 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1);
4961 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 2);
4962 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
4963 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4964 IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
4965 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize);
4966 IEM_MC_END();
4967 return VINF_SUCCESS;
4968}
4969
4970
/* Grp15 memory-form stubs.  NOTE(review): the UD_STUB variants presumably
   decode straight to #UD while the plain STUBs mark unimplemented
   instructions - confirm against the FNIEMOP_*STUB* macro definitions. */

/** Opcode 0x0f 0xae mem/2. LDMXCSR - not yet implemented. */
FNIEMOP_STUB_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/3. STMXCSR - not yet implemented. */
FNIEMOP_STUB_1(iemOp_Grp15_stmxcsr, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/4. XSAVE. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsave, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/5. XRSTOR. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xrstor, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/6. XSAVEOPT. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm);

/** Opcode 0x0f 0xae mem/7. CLFLUSH - not yet implemented. */
FNIEMOP_STUB_1(iemOp_Grp15_clflush, uint8_t, bRm);
4988
4989
/** Opcode 0x0f 0xae 11b/5.
 * LFENCE - raises \#UD when the guest CPU lacks SSE2; executes the real
 * lfence when the host has SSE2, otherwise the alternative memory fence. */
FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm)
{
    IEMOP_MNEMONIC("lfence");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pIemCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5007
5008
/** Opcode 0x0f 0xae 11b/6.
 * MFENCE - raises \#UD when the guest CPU lacks SSE2; executes the real
 * mfence when the host has SSE2, otherwise the alternative memory fence. */
FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm)
{
    IEMOP_MNEMONIC("mfence");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pIemCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5026
5027
/** Opcode 0x0f 0xae 11b/7.
 * SFENCE - raises \#UD when the guest CPU lacks SSE2; executes the real
 * sfence when the host has SSE2, otherwise the alternative memory fence.
 * (NOTE(review): SFENCE is an SSE1 instruction on real hardware; this
 * gates on SSE2 like the other fences - verify intent.) */
FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm)
{
    IEMOP_MNEMONIC("sfence");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (!IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fSse2)
        return IEMOP_RAISE_INVALID_OPCODE();

    IEM_MC_BEGIN(0, 0);
    if (IEM_GET_HOST_CPU_FEATURES(pIemCpu)->fSse2)
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence);
    else
        IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
5045
5046
/* F3-prefixed Grp15 register forms (FSGSBASE group) - all currently decode
   via the UD stub macro. */

/** Opcode 0xf3 0x0f 0xae 11b/0. RDFSBASE. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/1. RDGSBASE. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/2. WRFSBASE. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xae 11b/3. WRGSBASE. */
FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm);
5058
5059
/**
 * Opcode 0x0f 0xae - group 15 dispatcher.
 *
 * Memory forms (mod != 3) select fxsave/fxrstor/ldmxcsr/stmxcsr/xsave/xrstor/
 * xsaveopt/clflush by the reg field, irrespective of prefixes.  Register forms
 * (mod == 3) are prefix sensitive: no prefix gives the fences (reg 5-7), the
 * F3 (REPZ) prefix gives the FSGSBASE instructions (reg 0-3), and any other
 * combination of REPZ/REPNZ/opsize/lock decodes as an invalid opcode.
 */
FNIEMOP_DEF(iemOp_Grp15)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Memory forms - dispatch on the reg field only. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_Grp15_fxsave, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_Grp15_fxrstor, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_Grp15_ldmxcsr, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_Grp15_stmxcsr, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_Grp15_xsave, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_Grp15_xrstor, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_Grp15_xsaveopt,bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_Grp15_clflush, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Register forms - the valid instruction depends on which (if any)
           of these prefixes were seen during decoding. */
        switch (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_LOCK))
        {
            case 0:
                /* No prefix: only the fence encodings (reg 5-7) are valid. */
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return IEMOP_RAISE_INVALID_OPCODE();
                    case 1: return IEMOP_RAISE_INVALID_OPCODE();
                    case 2: return IEMOP_RAISE_INVALID_OPCODE();
                    case 3: return IEMOP_RAISE_INVALID_OPCODE();
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return FNIEMOP_CALL_1(iemOp_Grp15_lfence, bRm);
                    case 6: return FNIEMOP_CALL_1(iemOp_Grp15_mfence, bRm);
                    case 7: return FNIEMOP_CALL_1(iemOp_Grp15_sfence, bRm);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reachable - every case above returns */

            case IEM_OP_PRF_REPZ:
                /* F3 prefix: only the FSGSBASE encodings (reg 0-3) are valid. */
                switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
                {
                    case 0: return FNIEMOP_CALL_1(iemOp_Grp15_rdfsbase, bRm);
                    case 1: return FNIEMOP_CALL_1(iemOp_Grp15_rdgsbase, bRm);
                    case 2: return FNIEMOP_CALL_1(iemOp_Grp15_wrfsbase, bRm);
                    case 3: return FNIEMOP_CALL_1(iemOp_Grp15_wrgsbase, bRm);
                    case 4: return IEMOP_RAISE_INVALID_OPCODE();
                    case 5: return IEMOP_RAISE_INVALID_OPCODE();
                    case 6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reachable - every case above returns */

            default:
                return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
}
5118
5119
/** Opcode 0x0f 0xaf - imul Gv,Ev (two operand form).
 * Defers to the generic reg,r/m binary-operator helper; SF/ZF/AF/PF are
 * treated as undefined for verification purposes. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev)
{
    IEMOP_MNEMONIC("imul Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two);
}
5127
5128
/** Opcode 0x0f 0xb0 - cmpxchg Eb,Gb.
 *
 * Compares AL with the r/m byte and conditionally exchanges it with the
 * source register byte; the actual compare/exchange is done by the
 * iemAImpl_cmpxchg_u8[_locked] helper.
 */
FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb)
{
    IEMOP_MNEMONIC("cmpxchg Eb,Gb");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: no more opcode bytes, work directly on the
           guest register copies. */
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_BEGIN(4, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
        IEM_MC_ARG(uint8_t *,       pu8Al,                  1);
        IEM_MC_ARG(uint8_t,         u8Src,                  2);
        IEM_MC_ARG(uint32_t *,      pEFlags,                3);

        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: decoding is only done after the effective
           address has been calculated, and the destination byte is mapped
           read-write and committed after the helper call. */
        IEM_MC_BEGIN(4, 3);
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
        IEM_MC_ARG(uint8_t *,       pu8Al,                  1);
        IEM_MC_ARG(uint8_t,         u8Src,                  2);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        3);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, u8Al);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_REF_LOCAL(pu8Al, u8Al);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        /* AL is written back unconditionally from the (possibly updated) local. */
        IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5186
/** Opcode 0x0f 0xb1 - cmpxchg Ev,Gv.
 *
 * Word/dword/qword variant of cmpxchg; dispatches on the effective operand
 * size and on register vs. memory destination.  On 32-bit hosts the 64-bit
 * source is passed by reference to the assembly helper (RT_ARCH_X86 paths) -
 * presumably a helper calling-convention constraint, TODO confirm.
 */
FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv)
{
    IEMOP_MNEMONIC("cmpxchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: decoding completes immediately. */
        IEMOP_HLP_DONE_DECODING();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG(uint16_t *,      pu16Ax,                 1);
                IEM_MC_ARG(uint16_t,        u16Src,                 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG(uint32_t *,      pu32Eax,                1);
                IEM_MC_ARG(uint32_t,        u32Src,                 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                /* 32-bit register writes clear the upper halves of the 64-bit registers. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG(uint64_t *,      pu64Rax,                1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *,      pu64Src,                2);
#else
                IEM_MC_ARG(uint64_t,        u64Src,                 2);
#endif
                IEM_MC_ARG(uint32_t *,      pEFlags,                3);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX);
                IEM_MC_REF_EFLAGS(pEFlags);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory destination: calculate the effective address first, then
           finish decoding, map the operand read-write, and commit both the
           memory operand and EFLAGS only after the helper has run.  The
           accumulator is written back unconditionally from the local. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG(uint16_t *,      pu16Ax,                 1);
                IEM_MC_ARG(uint16_t,        u16Src,                 2);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint16_t, u16Ax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu16Ax, u16Ax);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG(uint32_t *,      pu32Eax,                1);
                IEM_MC_ARG(uint32_t,        u32Src,                 2);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint32_t, u32Eax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu32Eax, u32Eax);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 3);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG(uint64_t *,      pu64Rax,                1);
#ifdef RT_ARCH_X86
                IEM_MC_ARG(uint64_t *,      pu64Src,                2);
#else
                IEM_MC_ARG(uint64_t,        u64Src,                 2);
#endif
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint64_t, u64Rax);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_REF_LOCAL(pu64Rax, u64Rax);
#ifdef RT_ARCH_X86
                IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags);
#else
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags);
#endif

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5379
5380
/**
 * Common worker for lss/lfs/lgs (far pointer loads).
 *
 * Fetches the offset part and then the selector part of the far pointer from
 * memory (selector sits after the offset: +2/+4/+8 depending on operand size)
 * and hands both to iemCImpl_load_SReg_Greg, which performs the segment
 * register and general register loads.
 *
 * @param   iSegReg     The segment register to load (X86_SREG_XXX).
 * @param   bRm         The ModR/M byte; must denote a memory operand.
 */
FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm)
{
    Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */
    uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg;

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t,        uSel,                                    0);
            IEM_MC_ARG(uint16_t,        offSeg,                                  1);
            IEM_MC_ARG_CONST(uint8_t,   iSegRegArg,/*=*/iSegReg,                 2);
            IEM_MC_ARG_CONST(uint8_t,   iGRegArg,  /*=*/iGReg,                   3);
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 2);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t,        uSel,                                    0);
            IEM_MC_ARG(uint32_t,        offSeg,                                  1);
            IEM_MC_ARG_CONST(uint8_t,   iSegRegArg,/*=*/iSegReg,                 2);
            IEM_MC_ARG_CONST(uint8_t,   iGRegArg,  /*=*/iGReg,                   3);
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 4);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(5, 1);
            IEM_MC_ARG(uint16_t,        uSel,                                    0);
            IEM_MC_ARG(uint64_t,        offSeg,                                  1);
            IEM_MC_ARG_CONST(uint8_t,   iSegRegArg,/*=*/iSegReg,                 2);
            IEM_MC_ARG_CONST(uint8_t,   iGRegArg,  /*=*/iGReg,                   3);
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize,/*=*/pIemCpu->enmEffOpSize, 4);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEff);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            if (IEM_IS_GUEST_CPU_AMD(pIemCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */
                IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            else
                IEM_MC_FETCH_MEM_U64(offSeg, pIemCpu->iEffSeg, GCPtrEff);
            IEM_MC_FETCH_MEM_U16_DISP(uSel, pIemCpu->iEffSeg, GCPtrEff, 8);
            IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5442
5443
5444/** Opcode 0x0f 0xb2. */
5445FNIEMOP_DEF(iemOp_lss_Gv_Mp)
5446{
5447 IEMOP_MNEMONIC("lss Gv,Mp");
5448 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5449 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5450 return IEMOP_RAISE_INVALID_OPCODE();
5451 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm);
5452}
5453
5454
/** Opcode 0x0f 0xb3 - btr Ev,Gv.
 * Defers to the common bit-test worker with the btr implementation table. */
FNIEMOP_DEF(iemOp_btr_Ev_Gv)
{
    IEMOP_MNEMONIC("btr Ev,Gv");
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr);
}
5461
5462
5463/** Opcode 0x0f 0xb4. */
5464FNIEMOP_DEF(iemOp_lfs_Gv_Mp)
5465{
5466 IEMOP_MNEMONIC("lfs Gv,Mp");
5467 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5468 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5469 return IEMOP_RAISE_INVALID_OPCODE();
5470 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm);
5471}
5472
5473
5474/** Opcode 0x0f 0xb5. */
5475FNIEMOP_DEF(iemOp_lgs_Gv_Mp)
5476{
5477 IEMOP_MNEMONIC("lgs Gv,Mp");
5478 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5479 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5480 return IEMOP_RAISE_INVALID_OPCODE();
5481 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm);
5482}
5483
5484
/** Opcode 0x0f 0xb6 - movzx Gv,Eb.
 *
 * Zero-extends a byte from register or memory into the destination general
 * register; one case per effective operand size for both source forms.
 */
FNIEMOP_DEF(iemOp_movzx_Gv_Eb)
{
    IEMOP_MNEMONIC("movzx Gv,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5574
5575
/** Opcode 0x0f 0xb7 - movzx Gv,Ew.
 *
 * Zero-extends a word from register or memory.  The 16-bit destination case
 * is folded into the 32-bit one (only a 64-bit effective operand size picks
 * the U64 path).
 */
FNIEMOP_DEF(iemOp_movzx_Gv_Ew)
{
    IEMOP_MNEMONIC("movzx Gv,Ew");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
5641
5642
/** Opcode 0x0f 0xb8 (popcnt Gv,Ev with F3 prefix / jmpe) - not yet implemented, plain stub. */
FNIEMOP_STUB(iemOp_popcnt_Gv_Ev_jmpe);
5645
5646
/** Opcode 0x0f 0xb9 - group 10 (ud1).
 * Deliberately decodes to an invalid opcode; logged for diagnostics. */
FNIEMOP_DEF(iemOp_Grp10)
{
    Log(("iemOp_Grp10 -> #UD\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
5653
5654
/** Opcode 0x0f 0xba - group 8: bt/bts/btr/btc Ev,Ib.
 *
 * Selects the implementation table from the reg field (0-3 are invalid),
 * then handles register and memory destinations.  The immediate bit offset
 * is always masked to the operand width (0x0f/0x1f/0x3f), so unlike the Gv
 * forms there is no adjustment of the memory address by the bit offset.
 */
FNIEMOP_DEF(iemOp_Grp8)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPBINSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: case 1: case 2: case 3:
            return IEMOP_RAISE_INVALID_OPCODE();
        case 4: pImpl = &g_iemAImpl_bt;  IEMOP_MNEMONIC("bt  Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC("bts Ev,Ib"); break;
        case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC("btr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC("btc Ev,Ib"); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination. */
        uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ u8Bit & 0x0f, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ u8Bit & 0x1f, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* 32-bit register writes clear the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ u8Bit & 0x3f, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination. */

        /* BT has no locked form (pfnLockedU16 is NULL) - reject the lock
           prefix and map the operand read-only; the modifying forms map it
           read-write. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* BT */
        {
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }

        /** @todo test negative bit offsets! */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *,              pu16Dst,                0);
                IEM_MC_ARG(uint16_t,                u16Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Note: the effective address is calculated with one immediate
                   byte still pending (third argument), then the ib is fetched. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *,              pu32Dst,                0);
                IEM_MC_ARG(uint32_t,                u32Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *,              pu64Dst,                0);
                IEM_MC_ARG(uint64_t,                u64Src,                 1);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit);
                IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);

                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

}
5816
5817
/** Opcode 0x0f 0xbb - btc Ev,Gv.
 * Defers to the common bit-test worker with the btc implementation table. */
FNIEMOP_DEF(iemOp_btc_Ev_Gv)
{
    IEMOP_MNEMONIC("btc Ev,Gv");
    return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc);
}
5824
5825
/** Opcode 0x0f 0xbc - bsf Gv,Ev.
 * Defers to the generic reg,r/m binary-operator helper; everything but ZF is
 * treated as undefined for verification purposes. */
FNIEMOP_DEF(iemOp_bsf_Gv_Ev)
{
    IEMOP_MNEMONIC("bsf Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf);
}
5833
5834
/** Opcode 0x0f 0xbd - bsr Gv,Ev.
 * Defers to the generic reg,r/m binary-operator helper; everything but ZF is
 * treated as undefined for verification purposes. */
FNIEMOP_DEF(iemOp_bsr_Gv_Ev)
{
    IEMOP_MNEMONIC("bsr Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr);
}
5842
5843
/** Opcode 0x0f 0xbe - movsx Gv,Eb.
 *
 * Sign-extends a byte from register or memory into the destination general
 * register; one case per effective operand size for both source forms.
 */
FNIEMOP_DEF(iemOp_movsx_Gv_Eb)
{
    IEMOP_MNEMONIC("movsx Gv,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5933
5934
/** Opcode 0x0f 0xbf - movsx Gv,Ew.
 *
 * Sign-extends a word from register or memory.  The 16-bit destination case
 * is folded into the 32-bit one (only a 64-bit effective operand size picks
 * the U64 path).
 */
FNIEMOP_DEF(iemOp_movsx_Gv_Ew)
{
    IEMOP_MNEMONIC("movsx Gv,Ew");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /** @todo Not entirely sure how the operand size prefix is handled here,
     *        assuming that it will be ignored. Would be nice to have a few
     *        test for this. */
    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
        else
        {
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
        }
    }
    return VINF_SUCCESS;
}
6000
6001
6002/** Opcode 0x0f 0xc0. */
6003FNIEMOP_DEF(iemOp_xadd_Eb_Gb)
6004{
6005 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6006 IEMOP_MNEMONIC("xadd Eb,Gb");
6007
6008 /*
6009 * If rm is denoting a register, no more instruction bytes.
6010 */
6011 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6012 {
6013 IEMOP_HLP_NO_LOCK_PREFIX();
6014
6015 IEM_MC_BEGIN(3, 0);
6016 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6017 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
6018 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6019
6020 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
6021 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
6022 IEM_MC_REF_EFLAGS(pEFlags);
6023 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
6024
6025 IEM_MC_ADVANCE_RIP();
6026 IEM_MC_END();
6027 }
6028 else
6029 {
6030 /*
6031 * We're accessing memory.
6032 */
6033 IEM_MC_BEGIN(3, 3);
6034 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6035 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
6036 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6037 IEM_MC_LOCAL(uint8_t, u8RegCopy);
6038 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6039
6040 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6041 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
6042 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
6043 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy);
6044 IEM_MC_FETCH_EFLAGS(EFlags);
6045 if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
6046 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags);
6047 else
6048 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags);
6049
6050 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6051 IEM_MC_COMMIT_EFLAGS(EFlags);
6052 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8RegCopy);
6053 IEM_MC_ADVANCE_RIP();
6054 IEM_MC_END();
6055 return VINF_SUCCESS;
6056 }
6057 return VINF_SUCCESS;
6058}
6059
6060
/** Opcode 0x0f 0xc1. */
FNIEMOP_DEF(iemOp_xadd_Ev_Gv)
{
    IEMOP_MNEMONIC("xadd Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: LOCK is invalid; dispatch on effective
           operand size. */
        IEMOP_HLP_NO_LOCK_PREFIX();

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);

                /* 32-bit GPR writes in 64-bit mode zero the upper dword; both
                   registers were written through references, so clear both. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.  Same pattern in all three sizes: map the
         * destination R/W, run the (optionally locked) worker against a copy
         * of the source register, then commit memory, EFLAGS and finally the
         * exchanged register value.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64RegCopy);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64RegCopy);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6212
/* Opcodes 0x0f 0xc2 thru 0xc6 are not implemented yet.  NOTE(review):
   FNIEMOP_STUB presumably expands to a decoder that fails with a
   not-implemented status -- confirm against the macro definition. */
/** Opcode 0x0f 0xc2. */
FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib);

/** Opcode 0x0f 0xc3. */
FNIEMOP_STUB(iemOp_movnti_My_Gy);

/** Opcode 0x0f 0xc4. */
FNIEMOP_STUB(iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib);

/** Opcode 0x0f 0xc5. */
FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib);

/** Opcode 0x0f 0xc6. */
FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib);
6227
6228
/** Opcode 0x0f 0xc7 !11/1. */
FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm)
{
    IEMOP_MNEMONIC("cmpxchg8b Mq");

    IEM_MC_BEGIN(4, 3);
    IEM_MC_ARG(uint64_t *, pu64MemDst, 0);
    IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1);
    IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2);
    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3);
    IEM_MC_LOCAL(RTUINT64U, u64EaxEdx);
    IEM_MC_LOCAL(RTUINT64U, u64EbxEcx);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);

    /* Assemble the EDX:EAX comparand from the two 32-bit register halves. */
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
    IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);

    /* Likewise the ECX:EBX replacement value. */
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
    IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);

    IEM_MC_FETCH_EFLAGS(EFlags);
    /* The LOCK prefix selects the atomic worker variant. */
    if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);
    else
        IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags);

    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    /* On compare failure (ZF clear), write the u64EaxEdx copy back to
       EDX:EAX -- presumably the worker placed the memory value there;
       confirm in iemAImpl_cmpxchg8b. */
    IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
        /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */
        IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
        IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6273
6274
/* The remaining group 9 forms are declared as stubs; the _UD_ in the macro
   name indicates they currently raise an invalid-opcode (#UD) -- confirm
   against the FNIEMOP_UD_STUB_1 definition. */
/** Opcode REX.W 0x0f 0xc7 !11/1. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm);

/** Opcode 0x0f 0xc7 11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm);

/** Opcode 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm);

/** Opcode 0x66 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm);

/** Opcode 0xf3 0x0f 0xc7 !11/6. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm);

/** Opcode [0xf3] 0x0f 0xc7 !11/7. */
FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm);
6292
6293
6294/** Opcode 0x0f 0xc7. */
6295FNIEMOP_DEF(iemOp_Grp9)
6296{
6297 /** @todo Testcase: Check mixing 0x66 and 0xf3. Check the effect of 0xf2. */
6298 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6299 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6300 {
6301 case 0: case 2: case 3: case 4: case 5:
6302 return IEMOP_RAISE_INVALID_OPCODE();
6303 case 1:
6304 /** @todo Testcase: Check prefix effects on cmpxchg8b/16b. */
6305 if ( (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)
6306 || (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))) /** @todo Testcase: AMD seems to express a different idea here wrt prefixes. */
6307 return IEMOP_RAISE_INVALID_OPCODE();
6308 if (bRm & IEM_OP_PRF_SIZE_REX_W)
6309 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm);
6310 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm);
6311 case 6:
6312 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6313 return FNIEMOP_CALL_1(iemOp_Grp9_rdrand_Rv, bRm);
6314 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6315 {
6316 case 0:
6317 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrld_Mq, bRm);
6318 case IEM_OP_PRF_SIZE_OP:
6319 return FNIEMOP_CALL_1(iemOp_Grp9_vmclear_Mq, bRm);
6320 case IEM_OP_PRF_REPZ:
6321 return FNIEMOP_CALL_1(iemOp_Grp9_vmxon_Mq, bRm);
6322 default:
6323 return IEMOP_RAISE_INVALID_OPCODE();
6324 }
6325 case 7:
6326 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ))
6327 {
6328 case 0:
6329 case IEM_OP_PRF_REPZ:
6330 return FNIEMOP_CALL_1(iemOp_Grp9_vmptrst_Mq, bRm);
6331 default:
6332 return IEMOP_RAISE_INVALID_OPCODE();
6333 }
6334 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6335 }
6336}
6337
6338
6339/**
6340 * Common 'bswap register' helper.
6341 */
6342FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg)
6343{
6344 IEMOP_HLP_NO_LOCK_PREFIX();
6345 switch (pIemCpu->enmEffOpSize)
6346 {
6347 case IEMMODE_16BIT:
6348 IEM_MC_BEGIN(1, 0);
6349 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6350 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */
6351 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst);
6352 IEM_MC_ADVANCE_RIP();
6353 IEM_MC_END();
6354 return VINF_SUCCESS;
6355
6356 case IEMMODE_32BIT:
6357 IEM_MC_BEGIN(1, 0);
6358 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6359 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
6360 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6361 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst);
6362 IEM_MC_ADVANCE_RIP();
6363 IEM_MC_END();
6364 return VINF_SUCCESS;
6365
6366 case IEMMODE_64BIT:
6367 IEM_MC_BEGIN(1, 0);
6368 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6369 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
6370 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst);
6371 IEM_MC_ADVANCE_RIP();
6372 IEM_MC_END();
6373 return VINF_SUCCESS;
6374
6375 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6376 }
6377}
6378
6379
/** Opcode 0x0f 0xc8. */
FNIEMOP_DEF(iemOp_bswap_rAX_r8)
{
    IEMOP_MNEMONIC("bswap rAX/r8");
    /* Note! Intel manuals states that R8-R15 can be accessed by using a REX.X
       prefix. REX.B is the correct prefix it appears. For a parallel
       case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pIemCpu->uRexB);
}
6389
6390
/** Opcode 0x0f 0xc9. */
FNIEMOP_DEF(iemOp_bswap_rCX_r9)
{
    IEMOP_MNEMONIC("bswap rCX/r9");
    /* REX.B promotes rCX to r9; see the note in iemOp_bswap_rAX_r8. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pIemCpu->uRexB);
}
6397
6398
6399/** Opcode 0x0f 0xca. */
6400FNIEMOP_DEF(iemOp_bswap_rDX_r10)
6401{
6402 IEMOP_MNEMONIC("bswap rDX/r9");
6403 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pIemCpu->uRexB);
6404}
6405
6406
6407/** Opcode 0x0f 0xcb. */
6408FNIEMOP_DEF(iemOp_bswap_rBX_r11)
6409{
6410 IEMOP_MNEMONIC("bswap rBX/r9");
6411 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pIemCpu->uRexB);
6412}
6413
6414
/** Opcode 0x0f 0xcc. */
FNIEMOP_DEF(iemOp_bswap_rSP_r12)
{
    IEMOP_MNEMONIC("bswap rSP/r12");
    /* REX.B promotes rSP to r12; see the note in iemOp_bswap_rAX_r8. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pIemCpu->uRexB);
}
6421
6422
/** Opcode 0x0f 0xcd. */
FNIEMOP_DEF(iemOp_bswap_rBP_r13)
{
    IEMOP_MNEMONIC("bswap rBP/r13");
    /* REX.B promotes rBP to r13; see the note in iemOp_bswap_rAX_r8. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pIemCpu->uRexB);
}
6429
6430
/** Opcode 0x0f 0xce. */
FNIEMOP_DEF(iemOp_bswap_rSI_r14)
{
    IEMOP_MNEMONIC("bswap rSI/r14");
    /* REX.B promotes rSI to r14; see the note in iemOp_bswap_rAX_r8. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pIemCpu->uRexB);
}
6437
6438
/** Opcode 0x0f 0xcf. */
FNIEMOP_DEF(iemOp_bswap_rDI_r15)
{
    IEMOP_MNEMONIC("bswap rDI/r15");
    /* REX.B promotes rDI to r15; see the note in iemOp_bswap_rAX_r8. */
    return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pIemCpu->uRexB);
}
6445
6446
6447
/* Opcodes 0x0f 0xd0 thru 0xd6 are not implemented yet (decoder stubs). */
/** Opcode 0x0f 0xd0. */
FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps);
/** Opcode 0x0f 0xd1. */
FNIEMOP_STUB(iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq);
/** Opcode 0x0f 0xd2. */
FNIEMOP_STUB(iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq);
/** Opcode 0x0f 0xd3. */
FNIEMOP_STUB(iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq);
/** Opcode 0x0f 0xd4. */
FNIEMOP_STUB(iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq);
/** Opcode 0x0f 0xd5. */
FNIEMOP_STUB(iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq);
/** Opcode 0x0f 0xd6. */
FNIEMOP_STUB(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq);
6462
6463
6464/** Opcode 0x0f 0xd7. */
6465FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq)
6466{
6467 /* Docs says register only. */
6468 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6469 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */
6470 return IEMOP_RAISE_INVALID_OPCODE();
6471
6472 /* Note! Taking the lazy approch here wrt the high 32-bits of the GREG. */
6473 /** @todo testcase: Check that the instruction implicitly clears the high
6474 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256
6475 * and opcode modifications are made to work with the whole width (not
6476 * just 128). */
6477 switch (pIemCpu->fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6478 {
6479 case IEM_OP_PRF_SIZE_OP: /* SSE */
6480 IEMOP_MNEMONIC("pmovmskb Gd,Nq");
6481 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS);
6482 IEM_MC_BEGIN(2, 0);
6483 IEM_MC_ARG(uint64_t *, pDst, 0);
6484 IEM_MC_ARG(uint128_t const *, pSrc, 1);
6485 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
6486 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
6487 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
6488 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc);
6489 IEM_MC_ADVANCE_RIP();
6490 IEM_MC_END();
6491 return VINF_SUCCESS;
6492
6493 case 0: /* MMX */
6494 IEMOP_MNEMONIC("pmovmskb Gd,Udq");
6495 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS);
6496 IEM_MC_BEGIN(2, 0);
6497 IEM_MC_ARG(uint64_t *, pDst, 0);
6498 IEM_MC_ARG(uint64_t const *, pSrc, 1);
6499 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT();
6500 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
6501 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
6502 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc);
6503 IEM_MC_ADVANCE_RIP();
6504 IEM_MC_END();
6505 return VINF_SUCCESS;
6506
6507 default:
6508 return IEMOP_RAISE_INVALID_OPCODE();
6509 }
6510}
6511
6512
/* Opcodes 0x0f 0xd8 thru 0xee are not implemented yet (decoder stubs). */
/** Opcode 0x0f 0xd8. */
FNIEMOP_STUB(iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq);
/** Opcode 0x0f 0xd9. */
FNIEMOP_STUB(iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq);
/** Opcode 0x0f 0xda. */
FNIEMOP_STUB(iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq);
/** Opcode 0x0f 0xdb. */
FNIEMOP_STUB(iemOp_pand_Pq_Qq__pand_Vdq_Wdq);
/** Opcode 0x0f 0xdc. */
FNIEMOP_STUB(iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq);
/** Opcode 0x0f 0xdd. */
FNIEMOP_STUB(iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq);
/** Opcode 0x0f 0xde. */
FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq);
/** Opcode 0x0f 0xdf. */
FNIEMOP_STUB(iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq);
/** Opcode 0x0f 0xe0. */
FNIEMOP_STUB(iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq);
/** Opcode 0x0f 0xe1. */
FNIEMOP_STUB(iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq);
/** Opcode 0x0f 0xe2. */
FNIEMOP_STUB(iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq);
/** Opcode 0x0f 0xe3. */
FNIEMOP_STUB(iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq);
/** Opcode 0x0f 0xe4. */
FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq);
/** Opcode 0x0f 0xe5. */
FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq);
/** Opcode 0x0f 0xe6. */
FNIEMOP_STUB(iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd);
/** Opcode 0x0f 0xe7. */
FNIEMOP_STUB(iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq);
/** Opcode 0x0f 0xe8. */
FNIEMOP_STUB(iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq);
/** Opcode 0x0f 0xe9. */
FNIEMOP_STUB(iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq);
/** Opcode 0x0f 0xea. */
FNIEMOP_STUB(iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq);
/** Opcode 0x0f 0xeb. */
FNIEMOP_STUB(iemOp_por_Pq_Qq__por_Vdq_Wdq);
/** Opcode 0x0f 0xec. */
FNIEMOP_STUB(iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq);
/** Opcode 0x0f 0xed. */
FNIEMOP_STUB(iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq);
/** Opcode 0x0f 0xee. */
FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq);
6559
6560
/** Opcode 0x0f 0xef. */
FNIEMOP_DEF(iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq)
{
    IEMOP_MNEMONIC("pxor");
    /* Both the MMX (Pq,Qq) and SSE2 (Vdq,Wdq) forms go through the common
       full/full decoder with the pxor worker table; presumably the prefix
       dispatch happens in that helper -- confirm there. */
    return FNIEMOP_CALL_1(iemOpCommonMmxSse2_FullFull_To_Full, &g_iemAImpl_pxor);
}
6567
6568
/* Opcodes 0x0f 0xf0 thru 0xfe are not implemented yet (decoder stubs). */
/** Opcode 0x0f 0xf0. */
FNIEMOP_STUB(iemOp_lddqu_Vdq_Mdq);
/** Opcode 0x0f 0xf1. */
FNIEMOP_STUB(iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq);
/** Opcode 0x0f 0xf2. */
FNIEMOP_STUB(iemOp_psld_Pq_Qq__pslld_Vdq_Wdq);
/** Opcode 0x0f 0xf3. */
FNIEMOP_STUB(iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq);
/** Opcode 0x0f 0xf4. */
FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq);
/** Opcode 0x0f 0xf5. */
FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq);
/** Opcode 0x0f 0xf6. */
FNIEMOP_STUB(iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq);
/** Opcode 0x0f 0xf7. */
FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq);
/** Opcode 0x0f 0xf8. */
FNIEMOP_STUB(iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq); //NEXT
/** Opcode 0x0f 0xf9. */
FNIEMOP_STUB(iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq);
/** Opcode 0x0f 0xfa. */
FNIEMOP_STUB(iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq);
/** Opcode 0x0f 0xfb. */
FNIEMOP_STUB(iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq);
/** Opcode 0x0f 0xfc. */
FNIEMOP_STUB(iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq);
/** Opcode 0x0f 0xfd. */
FNIEMOP_STUB(iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq);
/** Opcode 0x0f 0xfe. */
FNIEMOP_STUB(iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq);
6599
6600
6601const PFNIEMOP g_apfnTwoByteMap[256] =
6602{
6603 /* 0x00 */ iemOp_Grp6,
6604 /* 0x01 */ iemOp_Grp7,
6605 /* 0x02 */ iemOp_lar_Gv_Ew,
6606 /* 0x03 */ iemOp_lsl_Gv_Ew,
6607 /* 0x04 */ iemOp_Invalid,
6608 /* 0x05 */ iemOp_syscall,
6609 /* 0x06 */ iemOp_clts,
6610 /* 0x07 */ iemOp_sysret,
6611 /* 0x08 */ iemOp_invd,
6612 /* 0x09 */ iemOp_wbinvd,
6613 /* 0x0a */ iemOp_Invalid,
6614 /* 0x0b */ iemOp_ud2,
6615 /* 0x0c */ iemOp_Invalid,
6616 /* 0x0d */ iemOp_nop_Ev_GrpP,
6617 /* 0x0e */ iemOp_femms,
6618 /* 0x0f */ iemOp_3Dnow,
6619 /* 0x10 */ iemOp_movups_Vps_Wps__movupd_Vpd_Wpd__movss_Vss_Wss__movsd_Vsd_Wsd,
6620 /* 0x11 */ iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd,
6621 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps_Vq_Uq__movlpd_Vq_Mq__movsldup_Vq_Wq__movddup_Vq_Wq,
6622 /* 0x13 */ iemOp_movlps_Mq_Vq__movlpd_Mq_Vq,
6623 /* 0x14 */ iemOp_unpckhlps_Vps_Wq__unpcklpd_Vpd_Wq,
6624 /* 0x15 */ iemOp_unpckhps_Vps_Wq__unpckhpd_Vpd_Wq,
6625 /* 0x16 */ iemOp_movhps_Vq_Mq__movlhps_Vq_Uq__movhpd_Vq_Mq__movshdup_Vq_Wq,
6626 /* 0x17 */ iemOp_movhps_Mq_Vq__movhpd_Mq_Vq,
6627 /* 0x18 */ iemOp_prefetch_Grp16,
6628 /* 0x19 */ iemOp_nop_Ev,
6629 /* 0x1a */ iemOp_nop_Ev,
6630 /* 0x1b */ iemOp_nop_Ev,
6631 /* 0x1c */ iemOp_nop_Ev,
6632 /* 0x1d */ iemOp_nop_Ev,
6633 /* 0x1e */ iemOp_nop_Ev,
6634 /* 0x1f */ iemOp_nop_Ev,
6635 /* 0x20 */ iemOp_mov_Rd_Cd,
6636 /* 0x21 */ iemOp_mov_Rd_Dd,
6637 /* 0x22 */ iemOp_mov_Cd_Rd,
6638 /* 0x23 */ iemOp_mov_Dd_Rd,
6639 /* 0x24 */ iemOp_mov_Rd_Td,
6640 /* 0x25 */ iemOp_Invalid,
6641 /* 0x26 */ iemOp_mov_Td_Rd,
6642 /* 0x27 */ iemOp_Invalid,
6643 /* 0x28 */ iemOp_movaps_Vps_Wps__movapd_Vpd_Wpd,
6644 /* 0x29 */ iemOp_movaps_Wps_Vps__movapd_Wpd_Vpd,
6645 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi__cvtpi2pd_Vpd_Qpi__cvtsi2ss_Vss_Ey__cvtsi2sd_Vsd_Ey,
6646 /* 0x2b */ iemOp_movntps_Mps_Vps__movntpd_Mpd_Vpd,
6647 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps__cvttpd2pi_Ppi_Wpd__cvttss2si_Gy_Wss__cvttsd2si_Yu_Wsd,
6648 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps__cvtpd2pi_QpiWpd__cvtss2si_Gy_Wss__cvtsd2si_Gy_Wsd,
6649 /* 0x2e */ iemOp_ucomiss_Vss_Wss__ucomisd_Vsd_Wsd,
6650 /* 0x2f */ iemOp_comiss_Vss_Wss__comisd_Vsd_Wsd,
6651 /* 0x30 */ iemOp_wrmsr,
6652 /* 0x31 */ iemOp_rdtsc,
6653 /* 0x32 */ iemOp_rdmsr,
6654 /* 0x33 */ iemOp_rdpmc,
6655 /* 0x34 */ iemOp_sysenter,
6656 /* 0x35 */ iemOp_sysexit,
6657 /* 0x36 */ iemOp_Invalid,
6658 /* 0x37 */ iemOp_getsec,
6659 /* 0x38 */ iemOp_3byte_Esc_A4,
6660 /* 0x39 */ iemOp_Invalid,
6661 /* 0x3a */ iemOp_3byte_Esc_A5,
6662 /* 0x3b */ iemOp_Invalid,
6663 /* 0x3c */ iemOp_movnti_Gv_Ev/*??*/,
6664 /* 0x3d */ iemOp_Invalid,
6665 /* 0x3e */ iemOp_Invalid,
6666 /* 0x3f */ iemOp_Invalid,
6667 /* 0x40 */ iemOp_cmovo_Gv_Ev,
6668 /* 0x41 */ iemOp_cmovno_Gv_Ev,
6669 /* 0x42 */ iemOp_cmovc_Gv_Ev,
6670 /* 0x43 */ iemOp_cmovnc_Gv_Ev,
6671 /* 0x44 */ iemOp_cmove_Gv_Ev,
6672 /* 0x45 */ iemOp_cmovne_Gv_Ev,
6673 /* 0x46 */ iemOp_cmovbe_Gv_Ev,
6674 /* 0x47 */ iemOp_cmovnbe_Gv_Ev,
6675 /* 0x48 */ iemOp_cmovs_Gv_Ev,
6676 /* 0x49 */ iemOp_cmovns_Gv_Ev,
6677 /* 0x4a */ iemOp_cmovp_Gv_Ev,
6678 /* 0x4b */ iemOp_cmovnp_Gv_Ev,
6679 /* 0x4c */ iemOp_cmovl_Gv_Ev,
6680 /* 0x4d */ iemOp_cmovnl_Gv_Ev,
6681 /* 0x4e */ iemOp_cmovle_Gv_Ev,
6682 /* 0x4f */ iemOp_cmovnle_Gv_Ev,
6683 /* 0x50 */ iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd,
6684 /* 0x51 */ iemOp_sqrtps_Wps_Vps__sqrtpd_Wpd_Vpd__sqrtss_Vss_Wss__sqrtsd_Vsd_Wsd,
6685 /* 0x52 */ iemOp_rsqrtps_Wps_Vps__rsqrtss_Vss_Wss,
6686 /* 0x53 */ iemOp_rcpps_Wps_Vps__rcpss_Vs_Wss,
6687 /* 0x54 */ iemOp_andps_Vps_Wps__andpd_Wpd_Vpd,
6688 /* 0x55 */ iemOp_andnps_Vps_Wps__andnpd_Wpd_Vpd,
6689 /* 0x56 */ iemOp_orps_Wpd_Vpd__orpd_Wpd_Vpd,
6690 /* 0x57 */ iemOp_xorps_Vps_Wps__xorpd_Wpd_Vpd,
6691 /* 0x58 */ iemOp_addps_Vps_Wps__addpd_Vpd_Wpd__addss_Vss_Wss__addsd_Vsd_Wsd,
6692 /* 0x59 */ iemOp_mulps_Vps_Wps__mulpd_Vpd_Wpd__mulss_Vss__Wss__mulsd_Vsd_Wsd,
6693 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps__cvtpd2ps_Vps_Wpd__cvtss2sd_Vsd_Wss__cvtsd2ss_Vss_Wsd,
6694 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq__cvtps2dq_Vdq_Wps__cvtps2dq_Vdq_Wps,
6695 /* 0x5c */ iemOp_subps_Vps_Wps__subpd_Vps_Wdp__subss_Vss_Wss__subsd_Vsd_Wsd,
6696 /* 0x5d */ iemOp_minps_Vps_Wps__minpd_Vpd_Wpd__minss_Vss_Wss__minsd_Vsd_Wsd,
6697 /* 0x5e */ iemOp_divps_Vps_Wps__divpd_Vpd_Wpd__divss_Vss_Wss__divsd_Vsd_Wsd,
6698 /* 0x5f */ iemOp_maxps_Vps_Wps__maxpd_Vpd_Wpd__maxss_Vss_Wss__maxsd_Vsd_Wsd,
6699 /* 0x60 */ iemOp_punpcklbw_Pq_Qd__punpcklbw_Vdq_Wdq,
6700 /* 0x61 */ iemOp_punpcklwd_Pq_Qd__punpcklwd_Vdq_Wdq,
6701 /* 0x62 */ iemOp_punpckldq_Pq_Qd__punpckldq_Vdq_Wdq,
6702 /* 0x63 */ iemOp_packsswb_Pq_Qq__packsswb_Vdq_Wdq,
6703 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq__pcmpgtb_Vdq_Wdq,
6704 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq__pcmpgtw_Vdq_Wdq,
6705 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq__pcmpgtd_Vdq_Wdq,
6706 /* 0x67 */ iemOp_packuswb_Pq_Qq__packuswb_Vdq_Wdq,
6707 /* 0x68 */ iemOp_punpckhbw_Pq_Qq__punpckhbw_Vdq_Wdq,
6708 /* 0x69 */ iemOp_punpckhwd_Pq_Qd__punpckhwd_Vdq_Wdq,
6709 /* 0x6a */ iemOp_punpckhdq_Pq_Qd__punpckhdq_Vdq_Wdq,
6710 /* 0x6b */ iemOp_packssdw_Pq_Qd__packssdq_Vdq_Wdq,
6711 /* 0x6c */ iemOp_punpcklqdq_Vdq_Wdq,
6712 /* 0x6d */ iemOp_punpckhqdq_Vdq_Wdq,
6713 /* 0x6e */ iemOp_movd_q_Pd_Ey__movd_q_Vy_Ey,
6714 /* 0x6f */ iemOp_movq_Pq_Qq__movdqa_Vdq_Wdq__movdqu_Vdq_Wdq,
6715 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib__pshufd_Vdq_Wdq_Ib__pshufhw_Vdq_Wdq_Ib__pshuflq_Vdq_Wdq_Ib,
6716 /* 0x71 */ iemOp_Grp12,
6717 /* 0x72 */ iemOp_Grp13,
6718 /* 0x73 */ iemOp_Grp14,
6719 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq__pcmpeqb_Vdq_Wdq,
6720 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq__pcmpeqw_Vdq_Wdq,
6721 /* 0x76 */ iemOp_pcmped_Pq_Qq__pcmpeqd_Vdq_Wdq,
6722 /* 0x77 */ iemOp_emms,
6723 /* 0x78 */ iemOp_vmread_AmdGrp17,
6724 /* 0x79 */ iemOp_vmwrite,
6725 /* 0x7a */ iemOp_Invalid,
6726 /* 0x7b */ iemOp_Invalid,
6727 /* 0x7c */ iemOp_haddpd_Vdp_Wpd__haddps_Vps_Wps,
6728 /* 0x7d */ iemOp_hsubpd_Vpd_Wpd__hsubps_Vps_Wps,
6729 /* 0x7e */ iemOp_movd_q_Ey_Pd__movd_q_Ey_Vy__movq_Vq_Wq,
6730 /* 0x7f */ iemOp_movq_Qq_Pq__movq_movdqa_Wdq_Vdq__movdqu_Wdq_Vdq,
6731 /* 0x80 */ iemOp_jo_Jv,
6732 /* 0x81 */ iemOp_jno_Jv,
6733 /* 0x82 */ iemOp_jc_Jv,
6734 /* 0x83 */ iemOp_jnc_Jv,
6735 /* 0x84 */ iemOp_je_Jv,
6736 /* 0x85 */ iemOp_jne_Jv,
6737 /* 0x86 */ iemOp_jbe_Jv,
6738 /* 0x87 */ iemOp_jnbe_Jv,
6739 /* 0x88 */ iemOp_js_Jv,
6740 /* 0x89 */ iemOp_jns_Jv,
6741 /* 0x8a */ iemOp_jp_Jv,
6742 /* 0x8b */ iemOp_jnp_Jv,
6743 /* 0x8c */ iemOp_jl_Jv,
6744 /* 0x8d */ iemOp_jnl_Jv,
6745 /* 0x8e */ iemOp_jle_Jv,
6746 /* 0x8f */ iemOp_jnle_Jv,
6747 /* 0x90 */ iemOp_seto_Eb,
6748 /* 0x91 */ iemOp_setno_Eb,
6749 /* 0x92 */ iemOp_setc_Eb,
6750 /* 0x93 */ iemOp_setnc_Eb,
6751 /* 0x94 */ iemOp_sete_Eb,
6752 /* 0x95 */ iemOp_setne_Eb,
6753 /* 0x96 */ iemOp_setbe_Eb,
6754 /* 0x97 */ iemOp_setnbe_Eb,
6755 /* 0x98 */ iemOp_sets_Eb,
6756 /* 0x99 */ iemOp_setns_Eb,
6757 /* 0x9a */ iemOp_setp_Eb,
6758 /* 0x9b */ iemOp_setnp_Eb,
6759 /* 0x9c */ iemOp_setl_Eb,
6760 /* 0x9d */ iemOp_setnl_Eb,
6761 /* 0x9e */ iemOp_setle_Eb,
6762 /* 0x9f */ iemOp_setnle_Eb,
6763 /* 0xa0 */ iemOp_push_fs,
6764 /* 0xa1 */ iemOp_pop_fs,
6765 /* 0xa2 */ iemOp_cpuid,
6766 /* 0xa3 */ iemOp_bt_Ev_Gv,
6767 /* 0xa4 */ iemOp_shld_Ev_Gv_Ib,
6768 /* 0xa5 */ iemOp_shld_Ev_Gv_CL,
6769 /* 0xa6 */ iemOp_Invalid,
6770 /* 0xa7 */ iemOp_Invalid,
6771 /* 0xa8 */ iemOp_push_gs,
6772 /* 0xa9 */ iemOp_pop_gs,
6773 /* 0xaa */ iemOp_rsm,
6774 /* 0xab */ iemOp_bts_Ev_Gv,
6775 /* 0xac */ iemOp_shrd_Ev_Gv_Ib,
6776 /* 0xad */ iemOp_shrd_Ev_Gv_CL,
6777 /* 0xae */ iemOp_Grp15,
6778 /* 0xaf */ iemOp_imul_Gv_Ev,
6779 /* 0xb0 */ iemOp_cmpxchg_Eb_Gb,
6780 /* 0xb1 */ iemOp_cmpxchg_Ev_Gv,
6781 /* 0xb2 */ iemOp_lss_Gv_Mp,
6782 /* 0xb3 */ iemOp_btr_Ev_Gv,
6783 /* 0xb4 */ iemOp_lfs_Gv_Mp,
6784 /* 0xb5 */ iemOp_lgs_Gv_Mp,
6785 /* 0xb6 */ iemOp_movzx_Gv_Eb,
6786 /* 0xb7 */ iemOp_movzx_Gv_Ew,
6787 /* 0xb8 */ iemOp_popcnt_Gv_Ev_jmpe,
6788 /* 0xb9 */ iemOp_Grp10,
6789 /* 0xba */ iemOp_Grp8,
6790 /* 0xbd */ iemOp_btc_Ev_Gv,
6791 /* 0xbc */ iemOp_bsf_Gv_Ev,
6792 /* 0xbd */ iemOp_bsr_Gv_Ev,
6793 /* 0xbe */ iemOp_movsx_Gv_Eb,
6794 /* 0xbf */ iemOp_movsx_Gv_Ew,
6795 /* 0xc0 */ iemOp_xadd_Eb_Gb,
6796 /* 0xc1 */ iemOp_xadd_Ev_Gv,
6797 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib__cmppd_Vpd_Wpd_Ib__cmpss_Vss_Wss_Ib__cmpsd_Vsd_Wsd_Ib,
6798 /* 0xc3 */ iemOp_movnti_My_Gy,
6799 /* 0xc4 */ iemOp_pinsrw_Pq_Ry_Mw_Ib__pinsrw_Vdq_Ry_Mw_Ib,
6800 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib__pextrw_Gd_Udq_Ib,
6801 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib__shufdp_Vpd_Wpd_Ib,
6802 /* 0xc7 */ iemOp_Grp9,
6803 /* 0xc8 */ iemOp_bswap_rAX_r8,
6804 /* 0xc9 */ iemOp_bswap_rCX_r9,
6805 /* 0xca */ iemOp_bswap_rDX_r10,
6806 /* 0xcb */ iemOp_bswap_rBX_r11,
6807 /* 0xcc */ iemOp_bswap_rSP_r12,
6808 /* 0xcd */ iemOp_bswap_rBP_r13,
6809 /* 0xce */ iemOp_bswap_rSI_r14,
6810 /* 0xcf */ iemOp_bswap_rDI_r15,
6811 /* 0xd0 */ iemOp_addsubpd_Vpd_Wpd__addsubps_Vps_Wps,
6812 /* 0xd1 */ iemOp_psrlw_Pp_Qp__psrlw_Vdp_Wdq,
6813 /* 0xd2 */ iemOp_psrld_Pq_Qq__psrld_Vdq_Wdq,
6814 /* 0xd3 */ iemOp_psrlq_Pq_Qq__psrlq_Vdq_Wdq,
6815 /* 0xd4 */ iemOp_paddq_Pq_Qq__paddq_Vdq_Wdq,
6816 /* 0xd5 */ iemOp_pmulq_Pq_Qq__pmullw_Vdq_Wdq,
6817 /* 0xd6 */ iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq,
6818 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq__pmovmskb_Gd_Udq,
6819 /* 0xd8 */ iemOp_psubusb_Pq_Qq__psubusb_Vdq_Wdq,
6820 /* 0xd9 */ iemOp_psubusw_Pq_Qq__psubusw_Vdq_Wdq,
6821 /* 0xda */ iemOp_pminub_Pq_Qq__pminub_Vdq_Wdq,
6822 /* 0xdb */ iemOp_pand_Pq_Qq__pand_Vdq_Wdq,
6823 /* 0xdc */ iemOp_paddusb_Pq_Qq__paddusb_Vdq_Wdq,
6824 /* 0xdd */ iemOp_paddusw_Pq_Qq__paddusw_Vdq_Wdq,
6825 /* 0xde */ iemOp_pmaxub_Pq_Qq__pamxub_Vdq_Wdq,
6826 /* 0xdf */ iemOp_pandn_Pq_Qq__pandn_Vdq_Wdq,
6827 /* 0xe0 */ iemOp_pavgb_Pq_Qq__pavgb_Vdq_Wdq,
6828 /* 0xe1 */ iemOp_psraw_Pq_Qq__psraw_Vdq_Wdq,
6829 /* 0xe2 */ iemOp_psrad_Pq_Qq__psrad_Vdq_Wdq,
6830 /* 0xe3 */ iemOp_pavgw_Pq_Qq__pavgw_Vdq_Wdq,
6831 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq__pmulhuw_Vdq_Wdq,
6832 /* 0xe5 */ iemOp_pmulhw_Pq_Qq__pmulhw_Vdq_Wdq,
6833 /* 0xe6 */ iemOp_cvttpd2dq_Vdq_Wdp__cvtdq2pd_Vdq_Wpd__cvtpd2dq_Vdq_Wpd,
6834 /* 0xe7 */ iemOp_movntq_Mq_Pq__movntdq_Mdq_Vdq,
6835 /* 0xe8 */ iemOp_psubsb_Pq_Qq__psubsb_Vdq_Wdq,
6836 /* 0xe9 */ iemOp_psubsw_Pq_Qq__psubsw_Vdq_Wdq,
6837 /* 0xea */ iemOp_pminsw_Pq_Qq__pminsw_Vdq_Wdq,
6838 /* 0xeb */ iemOp_por_Pq_Qq__por_Vdq_Wdq,
6839 /* 0xec */ iemOp_paddsb_Pq_Qq__paddsb_Vdq_Wdq,
6840 /* 0xed */ iemOp_paddsw_Pq_Qq__paddsw_Vdq_Wdq,
6841 /* 0xee */ iemOp_pmaxsw_Pq_Qq__pmaxsw_Vdq_Wdq,
6842 /* 0xef */ iemOp_pxor_Pq_Qq__pxor_Vdq_Wdq,
6843 /* 0xf0 */ iemOp_lddqu_Vdq_Mdq,
6844 /* 0xf1 */ iemOp_psllw_Pq_Qq__pslw_Vdq_Wdq,
6845 /* 0xf2 */ iemOp_psld_Pq_Qq__pslld_Vdq_Wdq,
6846 /* 0xf3 */ iemOp_psllq_Pq_Qq__pslq_Vdq_Wdq,
6847 /* 0xf4 */ iemOp_pmuludq_Pq_Qq__pmuludq_Vdq_Wdq,
6848 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq__pmaddwd_Vdq_Wdq,
6849 /* 0xf6 */ iemOp_psadbw_Pq_Qq__psadbw_Vdq_Wdq,
6850 /* 0xf7 */ iemOp_maskmovq_Pq_Nq__maskmovdqu_Vdq_Udq,
6851 /* 0xf8 */ iemOp_psubb_Pq_Qq_psubb_Vdq_Wdq,
6852 /* 0xf9 */ iemOp_psubw_Pq_Qq__psubw_Vdq_Wdq,
6853 /* 0xfa */ iemOp_psubd_Pq_Qq__psubd_Vdq_Wdq,
6854 /* 0xfb */ iemOp_psubq_Pq_Qq__psbuq_Vdq_Wdq,
6855 /* 0xfc */ iemOp_paddb_Pq_Qq__paddb_Vdq_Wdq,
6856 /* 0xfd */ iemOp_paddw_Pq_Qq__paddw_Vdq_Wdq,
6857 /* 0xfe */ iemOp_paddd_Pq_Qq__paddd_Vdq_Wdq,
6858 /* 0xff */ iemOp_Invalid
6859};
6860
6861/** @} */
6862
6863
6864/** @name One byte opcodes.
6865 *
6866 * @{
6867 */
6868
/** Opcode 0x00 - add r/m8, r8. */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    IEMOP_MNEMONIC("add Eb,Gb");
    /* Byte-sized binary op, r/m destination, register source. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
}


/** Opcode 0x01 - add r/m16/32/64, r16/32/64. */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    IEMOP_MNEMONIC("add Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
}


/** Opcode 0x02 - add r8, r/m8. */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    IEMOP_MNEMONIC("add Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
}


/** Opcode 0x03 - add r16/32/64, r/m16/32/64. */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    IEMOP_MNEMONIC("add Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
}


/** Opcode 0x04 - add AL, imm8. */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    IEMOP_MNEMONIC("add al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
}


/** Opcode 0x05 - add rAX, imm16/32 (sign-extended to 64-bit with REX.W). */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    IEMOP_MNEMONIC("add rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
}
6915
6916
/** Opcode 0x06 - push ES (invalid in 64-bit mode; handled by the common worker). */
FNIEMOP_DEF(iemOp_push_ES)
{
    IEMOP_MNEMONIC("push es");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}
6923
6924
6925/** Opcode 0x07. */
6926FNIEMOP_DEF(iemOp_pop_ES)
6927{
6928 IEMOP_MNEMONIC("pop es");
6929 IEMOP_HLP_NO_64BIT();
6930 IEMOP_HLP_NO_LOCK_PREFIX();
6931 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pIemCpu->enmEffOpSize);
6932}
6933
6934
/** Opcode 0x08 - or r/m8, r8. */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    IEMOP_MNEMONIC("or Eb,Gb");
    /* AF is architecturally undefined after logical operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
}


/** Opcode 0x09 - or r/m16/32/64, r16/32/64. */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    IEMOP_MNEMONIC("or Ev,Gv ");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
}


/** Opcode 0x0a - or r8, r/m8. */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    IEMOP_MNEMONIC("or Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
}


/** Opcode 0x0b - or r16/32/64, r/m16/32/64. */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    IEMOP_MNEMONIC("or Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
}


/** Opcode 0x0c - or AL, imm8. */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    IEMOP_MNEMONIC("or al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
}


/** Opcode 0x0d - or rAX, imm16/32. */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    IEMOP_MNEMONIC("or rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
}
6987
6988
/** Opcode 0x0e - push CS (no matching pop CS; invalid in 64-bit mode). */
FNIEMOP_DEF(iemOp_push_CS)
{
    IEMOP_MNEMONIC("push cs");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}


/** Opcode 0x0f - escape byte; fetch the next byte and dispatch via the
 *  two-byte opcode map. */
FNIEMOP_DEF(iemOp_2byteEscape)
{
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnTwoByteMap[b]);
}
7003
/** Opcode 0x10 - adc r/m8, r8 (add with carry). */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    IEMOP_MNEMONIC("adc Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
}


/** Opcode 0x11 - adc r/m16/32/64, r16/32/64. */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    IEMOP_MNEMONIC("adc Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
}


/** Opcode 0x12 - adc r8, r/m8. */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    IEMOP_MNEMONIC("adc Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
}


/** Opcode 0x13 - adc r16/32/64, r/m16/32/64. */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    IEMOP_MNEMONIC("adc Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
}


/** Opcode 0x14 - adc AL, imm8. */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    IEMOP_MNEMONIC("adc al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
}


/** Opcode 0x15 - adc rAX, imm16/32. */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    IEMOP_MNEMONIC("adc rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
}
7050
7051
/** Opcode 0x16 - push SS (invalid in 64-bit mode). */
FNIEMOP_DEF(iemOp_push_SS)
{
    IEMOP_MNEMONIC("push ss");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}


/** Opcode 0x17 - pop SS; raises \#UD in 64-bit mode and with a lock prefix. */
FNIEMOP_DEF(iemOp_pop_SS)
{
    IEMOP_MNEMONIC("pop ss"); /** @todo implies instruction fusing? */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pIemCpu->enmEffOpSize);
}
7068
7069
/** Opcode 0x18 - sbb r/m8, r8 (subtract with borrow). */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    IEMOP_MNEMONIC("sbb Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
}


/** Opcode 0x19 - sbb r/m16/32/64, r16/32/64. */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    IEMOP_MNEMONIC("sbb Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
}


/** Opcode 0x1a - sbb r8, r/m8. */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    IEMOP_MNEMONIC("sbb Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
}


/** Opcode 0x1b - sbb r16/32/64, r/m16/32/64. */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    IEMOP_MNEMONIC("sbb Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
}


/** Opcode 0x1c - sbb AL, imm8. */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    IEMOP_MNEMONIC("sbb al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
}


/** Opcode 0x1d - sbb rAX, imm16/32. */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    IEMOP_MNEMONIC("sbb rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
}
7116
7117
/** Opcode 0x1e - push DS (invalid in 64-bit mode). */
FNIEMOP_DEF(iemOp_push_DS)
{
    IEMOP_MNEMONIC("push ds");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}


/** Opcode 0x1f - pop DS; raises \#UD in 64-bit mode and with a lock prefix. */
FNIEMOP_DEF(iemOp_pop_DS)
{
    IEMOP_MNEMONIC("pop ds");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pIemCpu->enmEffOpSize);
}
7134
7135
/** Opcode 0x20 - and r/m8, r8. */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    IEMOP_MNEMONIC("and Eb,Gb");
    /* AF is architecturally undefined after logical operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
}


/** Opcode 0x21 - and r/m16/32/64, r16/32/64. */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    IEMOP_MNEMONIC("and Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
}


/** Opcode 0x22 - and r8, r/m8. */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    IEMOP_MNEMONIC("and Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
}


/** Opcode 0x23 - and r16/32/64, r/m16/32/64. */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    IEMOP_MNEMONIC("and Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
}


/** Opcode 0x24 - and AL, imm8. */
FNIEMOP_DEF(iemOp_and_Al_Ib)
{
    IEMOP_MNEMONIC("and al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
}


/** Opcode 0x25 - and rAX, imm16/32. */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    IEMOP_MNEMONIC("and rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
}
7188
7189
/** Opcode 0x26 - ES segment override prefix; records the prefix and
 *  re-dispatches on the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_ES)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_ES;
    pIemCpu->iEffSeg = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x27 - daa (decimal adjust AL after addition);
 *  \#UD in 64-bit mode, OF is architecturally undefined. */
FNIEMOP_DEF(iemOp_daa)
{
    IEMOP_MNEMONIC("daa AL");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
}
7211
7212
/** Opcode 0x28 - sub r/m8, r8. */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    IEMOP_MNEMONIC("sub Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
}


/** Opcode 0x29 - sub r/m16/32/64, r16/32/64. */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    IEMOP_MNEMONIC("sub Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
}


/** Opcode 0x2a - sub r8, r/m8. */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    IEMOP_MNEMONIC("sub Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
}


/** Opcode 0x2b - sub r16/32/64, r/m16/32/64. */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    IEMOP_MNEMONIC("sub Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
}


/** Opcode 0x2c - sub AL, imm8. */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    IEMOP_MNEMONIC("sub al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
}


/** Opcode 0x2d - sub rAX, imm16/32. */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    IEMOP_MNEMONIC("sub rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
}
7259
7260
/** Opcode 0x2e - CS segment override prefix; records the prefix and
 *  re-dispatches on the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_CS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_CS;
    pIemCpu->iEffSeg = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x2f - das (decimal adjust AL after subtraction);
 *  \#UD in 64-bit mode, OF is architecturally undefined. */
FNIEMOP_DEF(iemOp_das)
{
    IEMOP_MNEMONIC("das AL");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
}
7282
7283
/** Opcode 0x30 - xor r/m8, r8. */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    IEMOP_MNEMONIC("xor Eb,Gb");
    /* AF is architecturally undefined after logical operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
}


/** Opcode 0x31 - xor r/m16/32/64, r16/32/64. */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    IEMOP_MNEMONIC("xor Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
}


/** Opcode 0x32 - xor r8, r/m8. */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    IEMOP_MNEMONIC("xor Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
}


/** Opcode 0x33 - xor r16/32/64, r/m16/32/64. */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    IEMOP_MNEMONIC("xor Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
}


/** Opcode 0x34 - xor AL, imm8. */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    IEMOP_MNEMONIC("xor al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
}


/** Opcode 0x35 - xor rAX, imm16/32. */
FNIEMOP_DEF(iemOp_xor_eAX_Iz)
{
    IEMOP_MNEMONIC("xor rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
}
7336
7337
/** Opcode 0x36 - SS segment override prefix; records the prefix and
 *  re-dispatches on the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_SS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_SS;
    pIemCpu->iEffSeg = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x37 - aaa (ASCII adjust after addition); not implemented yet. */
FNIEMOP_STUB(iemOp_aaa);
7352
7353
/** Opcode 0x38 - cmp r/m8, r8 (compare; writes only EFLAGS). */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    IEMOP_MNEMONIC("cmp Eb,Gb");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
}


/** Opcode 0x39 - cmp r/m16/32/64, r16/32/64. */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    IEMOP_MNEMONIC("cmp Ev,Gv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
}


/** Opcode 0x3a - cmp r8, r/m8.
 *  NOTE(review): unlike the Eb,Gb/Ev,Gv forms above, the remaining cmp forms
 *  have no explicit lock-prefix rejection here — confirm whether the shared
 *  helpers handle it. */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    IEMOP_MNEMONIC("cmp Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
}


/** Opcode 0x3b - cmp r16/32/64, r/m16/32/64. */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    IEMOP_MNEMONIC("cmp Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
}


/** Opcode 0x3c - cmp AL, imm8. */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    IEMOP_MNEMONIC("cmp al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
}


/** Opcode 0x3d - cmp rAX, imm16/32. */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    IEMOP_MNEMONIC("cmp rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
}
7402
7403
/** Opcode 0x3e - DS segment override prefix; records the prefix and
 *  re-dispatches on the next opcode byte. */
FNIEMOP_DEF(iemOp_seg_DS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_DS;
    pIemCpu->iEffSeg = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/** Opcode 0x3f - aas (ASCII adjust after subtraction); not implemented yet. */
FNIEMOP_STUB(iemOp_aas);
7418
/**
 * Common 'inc/dec/not/neg register' helper.
 *
 * @param   pImpl   Worker table providing the size-specific implementations
 *                  (pfnNormalU16/U32/U64).
 * @param   iReg    General register index (X86_GREG_XXX) to operate on.
 */
FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX(); /* register destination: lock prefix is invalid */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            /* 32-bit writes zero the upper half of the 64-bit register. */
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    return VINF_SUCCESS; /* not reached - all IEMMODE values handled above */
}
7463
7464
/** Opcode 0x40 - inc eAX, or the plain REX prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode: record the prefix flags and
     * decode the next byte as the real opcode. (Same pattern for 0x40-0x4f.)
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
}


/** Opcode 0x41 - inc eCX, or REX.B in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pIemCpu->uRexB = 1 << 3; /* extends the ModRM r/m / base field */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
}


/** Opcode 0x42 - inc eDX, or REX.X in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pIemCpu->uRexIndex = 1 << 3; /* extends the SIB index field */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
}



/** Opcode 0x43 - inc eBX, or REX.BX in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pIemCpu->uRexB = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
}


/** Opcode 0x44 - inc eSP, or REX.R in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pIemCpu->uRexReg = 1 << 3; /* extends the ModRM reg field */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
}


/** Opcode 0x45 - inc eBP, or REX.RB in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexB = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
}


/** Opcode 0x46 - inc eSI, or REX.RX in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
}


/** Opcode 0x47 - inc eDI, or REX.RBX in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexB = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("inc eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
}
7636
7637
/** Opcode 0x48 - dec eAX, or REX.W in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode: REX.W changes the effective
     * operand size, hence the recalculation before re-dispatching.
     * (Same pattern for 0x48-0x4f.)
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
}


/** Opcode 0x49 - dec eCX, or REX.BW in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexB = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
}


/** Opcode 0x4a - dec eDX, or REX.XW in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
}


/** Opcode 0x4b - dec eBX, or REX.BXW in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexB = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
}


/** Opcode 0x4c - dec eSP, or REX.RW in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
}


/** Opcode 0x4d - dec eBP, or REX.RBW in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexB = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
}


/** Opcode 0x4e - dec eSI, or REX.RXW in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
}


/** Opcode 0x4f - dec eDI, or REX.RBXW in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pIemCpu->fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pIemCpu->uRexReg = 1 << 3;
        pIemCpu->uRexB = 1 << 3;
        pIemCpu->uRexIndex = 1 << 3;
        iemRecalEffOpSize(pIemCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC("dec eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
}
7816
7817
/**
 * Common 'push register' helper.
 *
 * In 64-bit mode the register index is REX.B extended and the default
 * operand size becomes 64-bit; a 66h prefix selects 16-bit instead (there
 * is no 32-bit push in long mode).
 *
 * @param   iReg    General register index (X86_GREG_XXX) to push.
 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pIemCpu->uRexB;
        pIemCpu->enmDefOpSize = IEMMODE_64BIT;
        pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
7863
7864
/** Opcode 0x50 - push rAX (r8 with REX.B). */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC("push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}


/** Opcode 0x51 - push rCX (r9 with REX.B). */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC("push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}


/** Opcode 0x52 - push rDX (r10 with REX.B). */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC("push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}


/** Opcode 0x53 - push rBX (r11 with REX.B). */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC("push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}


/** Opcode 0x54 - push rSP (r12 with REX.B). */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC("push rSP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
}


/** Opcode 0x55 - push rBP (r13 with REX.B). */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC("push rBP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}


/** Opcode 0x56 - push rSI (r14 with REX.B). */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC("push rSI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}


/** Opcode 0x57 - push rDI (r15 with REX.B). */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC("push rDI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
7927
7928
/**
 * Common 'pop register' helper.
 *
 * In 64-bit mode the register index is REX.B extended and the default
 * operand size becomes 64-bit; a 66h prefix selects 16-bit instead.
 * Note: the IEM_MC_LOCAL invocations below declare pointer locals
 * (the '*' is part of the name argument) which are then made to
 * reference the destination register.
 *
 * @param   iReg    General register index (X86_GREG_XXX) to pop into.
 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pIemCpu->uRexB;
        pIemCpu->enmDefOpSize = IEMMODE_64BIT;
        pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, *pu16Dst);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_POP_U16(pu16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, *pu32Dst);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_POP_U32(pu32Dst);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, *pu64Dst);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_POP_U64(pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
7975
7976
/** Opcode 0x58 - pop rAX (r8 with REX.B). */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC("pop rAX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}


/** Opcode 0x59 - pop rCX (r9 with REX.B). */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC("pop rCX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}


/** Opcode 0x5a - pop rDX (r10 with REX.B). */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC("pop rDX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}


/** Opcode 0x5b - pop rBX (r11 with REX.B). */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC("pop rBX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
8007
8008
/** Opcode 0x5c - pop rSP (r12 with REX.B).
 *
 * Special-cased: rSP is both the destination and the stack pointer being
 * incremented, so the value is popped into a local first and then stored.
 * With REX.B the destination is r12 and the common helper can be used.
 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC("pop rSP");
    if (pIemCpu->enmCpuMode == IEMMODE_64BIT)
    {
        if (pIemCpu->uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        pIemCpu->enmDefOpSize = IEMMODE_64BIT;
        pIemCpu->enmEffOpSize = !(pIemCpu->fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                           DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
    /** @todo add testcase for this instruction. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
8056
8057
8058/** Opcode 0x5d - 'pop rBP' (pop r13 when REX.B is set, via the common helper). */
8059FNIEMOP_DEF(iemOp_pop_eBP)
8060{
8061 IEMOP_MNEMONIC("pop rBP");
8062 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
8063}
8064
8065
8066/** Opcode 0x5e - 'pop rSI' (pop r14 when REX.B is set, via the common helper). */
8067FNIEMOP_DEF(iemOp_pop_eSI)
8068{
8069 IEMOP_MNEMONIC("pop rSI");
8070 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
8071}
8072
8073
8074/** Opcode 0x5f - 'pop rDI' (pop r15 when REX.B is set, via the common helper). */
8075FNIEMOP_DEF(iemOp_pop_eDI)
8076{
8077 IEMOP_MNEMONIC("pop rDI");
8078 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
8079}
8080
8081
8082/** Opcode 0x60 - PUSHA/PUSHAD; invalid in 64-bit mode; deferred to a C implementation. */
8083FNIEMOP_DEF(iemOp_pusha)
8084{
8085 IEMOP_MNEMONIC("pusha");
8086 IEMOP_HLP_NO_64BIT();
8087 if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
8088 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
 /* Not 64-bit (checked above), so only 32-bit remains. */
8089 Assert(pIemCpu->enmEffOpSize == IEMMODE_32BIT);
8090 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
8091}
8092
8093
8094/** Opcode 0x61 - POPA/POPAD; invalid in 64-bit mode; deferred to a C implementation. */
8095FNIEMOP_DEF(iemOp_popa)
8096{
8097 IEMOP_MNEMONIC("popa");
8098 IEMOP_HLP_NO_64BIT();
8099 if (pIemCpu->enmEffOpSize == IEMMODE_16BIT)
8100 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
 /* Not 64-bit (checked above), so only 32-bit remains. */
8101 Assert(pIemCpu->enmEffOpSize == IEMMODE_32BIT);
8102 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
8103}
8104
8105
8106/** Opcode 0x62 - BOUND (and the EVEX prefix byte); not implemented yet, stubbed. */
8107FNIEMOP_STUB(iemOp_bound_Gv_Ma_evex);
8108
8109
8110/** Opcode 0x63 - ARPL Ew,Gw - non-64-bit modes only (0x63 is MOVSXD in 64-bit mode). */
8111FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
8112{
8113 IEMOP_MNEMONIC("arpl Ew,Gw");
 /* Not decodable in real or V86 mode (see the helper macro). */
8114 IEMOP_HLP_NO_REAL_OR_V86_MODE();
8115 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8116
8117 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8118 {
8119 /* Register */
8120 IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
8121 IEM_MC_BEGIN(3, 0);
8122 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8123 IEM_MC_ARG(uint16_t, u16Src, 1);
8124 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8125
8126 IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8127 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
8128 IEM_MC_REF_EFLAGS(pEFlags);
8129 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);
8130
8131 IEM_MC_ADVANCE_RIP();
8132 IEM_MC_END();
8133 }
8134 else
8135 {
8136 /* Memory */
8137 IEM_MC_BEGIN(3, 2);
8138 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8139 IEM_MC_ARG(uint16_t, u16Src, 1);
 /* EFlags goes through a local here (fetch/commit) since the
    destination is mapped memory that must be committed as well. */
8140 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8141 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8142
8143 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8144 IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
8145 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
8146 IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
8147 IEM_MC_FETCH_EFLAGS(EFlags);
8148 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);
8149
8150 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
8151 IEM_MC_COMMIT_EFLAGS(EFlags);
8152 IEM_MC_ADVANCE_RIP();
8153 IEM_MC_END();
8154 }
8155 return VINF_SUCCESS;
8156
8157}
8158
8159
8160/** Opcode 0x63.
8161 * @note This is a weird one. It works like a regular move instruction if
8162 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
8163 * @todo This definitely needs a testcase to verify the odd cases. */
8164FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
8165{
8166 Assert(pIemCpu->enmEffOpSize == IEMMODE_64BIT); /* Caller branched already. */
8167
8168 IEMOP_MNEMONIC("movsxd Gv,Ev");
8169 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8170
8171 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8172 {
8173 /*
8174 * Register to register.
8175 */
8176 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8177 IEM_MC_BEGIN(0, 1);
8178 IEM_MC_LOCAL(uint64_t, u64Value);
 /* Fetch the 32-bit source sign-extended to 64 bits, then store to
    the REX.R-extended destination register. */
8179 IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
8180 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
8181 IEM_MC_ADVANCE_RIP();
8182 IEM_MC_END();
8183 }
8184 else
8185 {
8186 /*
8187 * We're loading a register from memory.
8188 */
8189 IEM_MC_BEGIN(0, 2);
8190 IEM_MC_LOCAL(uint64_t, u64Value);
8191 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8192 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8193 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8194 IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
8195 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
8196 IEM_MC_ADVANCE_RIP();
8197 IEM_MC_END();
8198 }
8199 return VINF_SUCCESS;
8200}
8201
8202
8203/** Opcode 0x64 - FS segment override prefix. */
8204FNIEMOP_DEF(iemOp_seg_FS)
8205{
8206 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
 /* Record the prefix and make FS the effective segment. */
8207 pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_FS;
8208 pIemCpu->iEffSeg = X86_SREG_FS;
8209
 /* Continue decoding with the next opcode byte. */
8210 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8211 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8212}
8213
8214
8215/** Opcode 0x65 - GS segment override prefix. */
8216FNIEMOP_DEF(iemOp_seg_GS)
8217{
8218 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
 /* Record the prefix and make GS the effective segment. */
8219 pIemCpu->fPrefixes |= IEM_OP_PRF_SEG_GS;
8220 pIemCpu->iEffSeg = X86_SREG_GS;
8221
 /* Continue decoding with the next opcode byte. */
8222 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8223 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8224}
8225
8226
8227/** Opcode 0x66 - operand-size override prefix. */
8228FNIEMOP_DEF(iemOp_op_size)
8229{
8230 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
 /* Record the prefix and recalculate the effective operand size. */
8231 pIemCpu->fPrefixes |= IEM_OP_PRF_SIZE_OP;
8232 iemRecalEffOpSize(pIemCpu);
8233
 /* Continue decoding with the next opcode byte. */
8234 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8235 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8236}
8237
8238
8239/** Opcode 0x67 - address-size override prefix. */
8240FNIEMOP_DEF(iemOp_addr_size)
8241{
8242 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
8243 pIemCpu->fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
 /* The override toggles between the two address sizes valid for the
    current default: 16<->32 in legacy modes, 64->32 in long mode. */
8244 switch (pIemCpu->enmDefAddrMode)
8245 {
8246 case IEMMODE_16BIT: pIemCpu->enmEffAddrMode = IEMMODE_32BIT; break;
8247 case IEMMODE_32BIT: pIemCpu->enmEffAddrMode = IEMMODE_16BIT; break;
8248 case IEMMODE_64BIT: pIemCpu->enmEffAddrMode = IEMMODE_32BIT; break;
8249 default: AssertFailed();
8250 }
8251
 /* Continue decoding with the next opcode byte. */
8252 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
8253 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
8254}
8255
8256
8257/** Opcode 0x68 - push immediate (word/dword; sign-extended dword in 64-bit mode). */
8258FNIEMOP_DEF(iemOp_push_Iz)
8259{
8260 IEMOP_MNEMONIC("push Iz");
 /* Pushes default to 64-bit operand size in long mode. */
8261 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8262 switch (pIemCpu->enmEffOpSize)
8263 {
8264 case IEMMODE_16BIT:
8265 {
8266 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8267 IEMOP_HLP_NO_LOCK_PREFIX();
8268 IEM_MC_BEGIN(0,0);
8269 IEM_MC_PUSH_U16(u16Imm);
8270 IEM_MC_ADVANCE_RIP();
8271 IEM_MC_END();
8272 return VINF_SUCCESS;
8273 }
8274
8275 case IEMMODE_32BIT:
8276 {
8277 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8278 IEMOP_HLP_NO_LOCK_PREFIX();
8279 IEM_MC_BEGIN(0,0);
8280 IEM_MC_PUSH_U32(u32Imm);
8281 IEM_MC_ADVANCE_RIP();
8282 IEM_MC_END();
8283 return VINF_SUCCESS;
8284 }
8285
8286 case IEMMODE_64BIT:
8287 {
 /* 64-bit form takes a 32-bit immediate sign-extended to 64 bits. */
8288 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
8289 IEMOP_HLP_NO_LOCK_PREFIX();
8290 IEM_MC_BEGIN(0,0);
8291 IEM_MC_PUSH_U64(u64Imm);
8292 IEM_MC_ADVANCE_RIP();
8293 IEM_MC_END();
8294 return VINF_SUCCESS;
8295 }
8296
8297 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8298 }
8299}
8300
8301
8302/** Opcode 0x69 - three-operand IMUL: Gv = Ev * Iz. */
8303FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
8304{
8305 IEMOP_MNEMONIC("imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
8306 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
 /* IMUL leaves SF/ZF/AF/PF undefined; tell verification mode so. */
8307 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
8308
8309 switch (pIemCpu->enmEffOpSize)
8310 {
8311 case IEMMODE_16BIT:
8312 {
8313 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8314 {
8315 /* register operand */
8316 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8317 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8318
8319 IEM_MC_BEGIN(3, 1);
8320 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8321 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
8322 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8323 IEM_MC_LOCAL(uint16_t, u16Tmp);
8324
 /* The multiply works on a local copy; the destination register
    is only written after the assembly helper returns. */
8325 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
8326 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
8327 IEM_MC_REF_EFLAGS(pEFlags);
8328 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
8329 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);
8330
8331 IEM_MC_ADVANCE_RIP();
8332 IEM_MC_END();
8333 }
8334 else
8335 {
8336 /* memory operand */
8337 IEM_MC_BEGIN(3, 2);
8338 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8339 IEM_MC_ARG(uint16_t, u16Src, 1);
8340 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8341 IEM_MC_LOCAL(uint16_t, u16Tmp);
8342 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8343
 /* The last argument of CALC_RM_EFF_ADDR is the number of
    immediate bytes still to be fetched (2 here) — presumably
    needed for RIP-relative addressing; TODO confirm. */
8344 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
8345 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8346 IEM_MC_ASSIGN(u16Src, u16Imm);
8347 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8348 IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
8349 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
8350 IEM_MC_REF_EFLAGS(pEFlags);
8351 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
8352 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);
8353
8354 IEM_MC_ADVANCE_RIP();
8355 IEM_MC_END();
8356 }
8357 return VINF_SUCCESS;
8358 }
8359
8360 case IEMMODE_32BIT:
8361 {
8362 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8363 {
8364 /* register operand */
8365 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8366 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8367
8368 IEM_MC_BEGIN(3, 1);
8369 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8370 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
8371 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8372 IEM_MC_LOCAL(uint32_t, u32Tmp);
8373
8374 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
8375 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
8376 IEM_MC_REF_EFLAGS(pEFlags);
8377 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
8378 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
8379
8380 IEM_MC_ADVANCE_RIP();
8381 IEM_MC_END();
8382 }
8383 else
8384 {
8385 /* memory operand */
8386 IEM_MC_BEGIN(3, 2);
8387 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8388 IEM_MC_ARG(uint32_t, u32Src, 1);
8389 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8390 IEM_MC_LOCAL(uint32_t, u32Tmp);
8391 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8392
8393 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
8394 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8395 IEM_MC_ASSIGN(u32Src, u32Imm);
8396 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8397 IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
8398 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
8399 IEM_MC_REF_EFLAGS(pEFlags);
8400 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
8401 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
8402
8403 IEM_MC_ADVANCE_RIP();
8404 IEM_MC_END();
8405 }
8406 return VINF_SUCCESS;
8407 }
8408
8409 case IEMMODE_64BIT:
8410 {
8411 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8412 {
8413 /* register operand */
 /* 64-bit form takes a 32-bit immediate sign-extended to 64 bits. */
8414 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
8415 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8416
8417 IEM_MC_BEGIN(3, 1);
8418 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8419 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
8420 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8421 IEM_MC_LOCAL(uint64_t, u64Tmp);
8422
8423 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
8424 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
8425 IEM_MC_REF_EFLAGS(pEFlags);
8426 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
8427 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
8428
8429 IEM_MC_ADVANCE_RIP();
8430 IEM_MC_END();
8431 }
8432 else
8433 {
8434 /* memory operand */
8435 IEM_MC_BEGIN(3, 2);
8436 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8437 IEM_MC_ARG(uint64_t, u64Src, 1);
8438 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8439 IEM_MC_LOCAL(uint64_t, u64Tmp);
8440 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8441
8442 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
8443 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
8444 IEM_MC_ASSIGN(u64Src, u64Imm);
8445 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8446 IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
8447 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
8448 IEM_MC_REF_EFLAGS(pEFlags);
8449 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
8450 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
8451
8452 IEM_MC_ADVANCE_RIP();
8453 IEM_MC_END();
8454 }
8455 return VINF_SUCCESS;
8456 }
8457 }
8458 AssertFailedReturn(VERR_IEM_IPE_9);
8459}
8460
8461
8462/** Opcode 0x6a - push sign-extended byte immediate. */
8463FNIEMOP_DEF(iemOp_push_Ib)
8464{
8465 IEMOP_MNEMONIC("push Ib");
8466 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
8467 IEMOP_HLP_NO_LOCK_PREFIX();
 /* Pushes default to 64-bit operand size in long mode. */
8468 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8469
8470 IEM_MC_BEGIN(0,0);
 /* i8Imm is sign-extended to the operand size by the implicit
    integer conversion when passed to the push macros. */
8471 switch (pIemCpu->enmEffOpSize)
8472 {
8473 case IEMMODE_16BIT:
8474 IEM_MC_PUSH_U16(i8Imm);
8475 break;
8476 case IEMMODE_32BIT:
8477 IEM_MC_PUSH_U32(i8Imm);
8478 break;
8479 case IEMMODE_64BIT:
8480 IEM_MC_PUSH_U64(i8Imm);
8481 break;
8482 }
8483 IEM_MC_ADVANCE_RIP();
8484 IEM_MC_END();
8485 return VINF_SUCCESS;
8486}
8487
8488
8489/** Opcode 0x6b - three-operand IMUL with sign-extended byte immediate: Gv = Ev * Ib. */
8490FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
8491{
8492 IEMOP_MNEMONIC("imul Gv,Ev,Ib"); /* Gv = Ev * Ib (sign-extended); */
8493 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
 /* IMUL leaves SF/ZF/AF/PF undefined; tell verification mode so. */
8494 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
8495
8496 switch (pIemCpu->enmEffOpSize)
8497 {
8498 case IEMMODE_16BIT:
8499 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8500 {
8501 /* register operand */
8502 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
8503 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8504
8505 IEM_MC_BEGIN(3, 1);
8506 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
 /* The (int8_t) cast sign-extends the immediate to 16 bits. */
8507 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
8508 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8509 IEM_MC_LOCAL(uint16_t, u16Tmp);
8510
 /* The multiply works on a local copy; the destination register
    is only written after the assembly helper returns. */
8511 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
8512 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
8513 IEM_MC_REF_EFLAGS(pEFlags);
8514 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
8515 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);
8516
8517 IEM_MC_ADVANCE_RIP();
8518 IEM_MC_END();
8519 }
8520 else
8521 {
8522 /* memory operand */
8523 IEM_MC_BEGIN(3, 2);
8524 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8525 IEM_MC_ARG(uint16_t, u16Src, 1);
8526 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8527 IEM_MC_LOCAL(uint16_t, u16Tmp);
8528 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8529
 /* One immediate byte follows the ModR/M bytes (3rd argument). */
8530 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8531 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
8532 IEM_MC_ASSIGN(u16Src, u16Imm);
8533 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8534 IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
8535 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
8536 IEM_MC_REF_EFLAGS(pEFlags);
8537 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
8538 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp);
8539
8540 IEM_MC_ADVANCE_RIP();
8541 IEM_MC_END();
8542 }
8543 return VINF_SUCCESS;
8544
8545 case IEMMODE_32BIT:
8546 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8547 {
8548 /* register operand */
8549 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
8550 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8551
8552 IEM_MC_BEGIN(3, 1);
8553 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8554 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
8555 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8556 IEM_MC_LOCAL(uint32_t, u32Tmp);
8557
8558 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
8559 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
8560 IEM_MC_REF_EFLAGS(pEFlags);
8561 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
8562 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
8563
8564 IEM_MC_ADVANCE_RIP();
8565 IEM_MC_END();
8566 }
8567 else
8568 {
8569 /* memory operand */
8570 IEM_MC_BEGIN(3, 2);
8571 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8572 IEM_MC_ARG(uint32_t, u32Src, 1);
8573 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8574 IEM_MC_LOCAL(uint32_t, u32Tmp);
8575 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8576
8577 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8578 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
8579 IEM_MC_ASSIGN(u32Src, u32Imm);
8580 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8581 IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
8582 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
8583 IEM_MC_REF_EFLAGS(pEFlags);
8584 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
8585 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp);
8586
8587 IEM_MC_ADVANCE_RIP();
8588 IEM_MC_END();
8589 }
8590 return VINF_SUCCESS;
8591
8592 case IEMMODE_64BIT:
8593 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8594 {
8595 /* register operand */
8596 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
8597 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8598
8599 IEM_MC_BEGIN(3, 1);
8600 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8601 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
8602 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8603 IEM_MC_LOCAL(uint64_t, u64Tmp);
8604
8605 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
8606 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
8607 IEM_MC_REF_EFLAGS(pEFlags);
8608 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
8609 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
8610
8611 IEM_MC_ADVANCE_RIP();
8612 IEM_MC_END();
8613 }
8614 else
8615 {
8616 /* memory operand */
8617 IEM_MC_BEGIN(3, 2);
8618 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8619 IEM_MC_ARG(uint64_t, u64Src, 1);
8620 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8621 IEM_MC_LOCAL(uint64_t, u64Tmp);
8622 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8623
8624 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8625 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
8626 IEM_MC_ASSIGN(u64Src, u64Imm);
8627 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8628 IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffDst);
8629 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
8630 IEM_MC_REF_EFLAGS(pEFlags);
8631 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
8632 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp);
8633
8634 IEM_MC_ADVANCE_RIP();
8635 IEM_MC_END();
8636 }
8637 return VINF_SUCCESS;
8638 }
8639 AssertFailedReturn(VERR_IEM_IPE_8);
8640}
8641
8642
8643/** Opcode 0x6c - INSB; deferred to C implementations (plain and REP variants). */
8644FNIEMOP_DEF(iemOp_insb_Yb_DX)
8645{
8646 IEMOP_HLP_NO_LOCK_PREFIX();
 /* Either REP prefix selects the repeating variant. */
8647 if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
8648 {
8649 IEMOP_MNEMONIC("rep ins Yb,DX");
8650 switch (pIemCpu->enmEffAddrMode)
8651 {
8652 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
8653 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
8654 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
8655 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8656 }
8657 }
8658 else
8659 {
8660 IEMOP_MNEMONIC("ins Yb,DX");
8661 switch (pIemCpu->enmEffAddrMode)
8662 {
8663 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
8664 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
8665 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
8666 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8667 }
8668 }
8669}
8670
8671
8672/** Opcode 0x6d - INSW/INSD; deferred to C implementations (plain and REP variants). */
8673FNIEMOP_DEF(iemOp_inswd_Yv_DX)
8674{
8675 IEMOP_HLP_NO_LOCK_PREFIX();
 /* Either REP prefix selects the repeating variant. */
8676 if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
8677 {
8678 IEMOP_MNEMONIC("rep ins Yv,DX");
8679 switch (pIemCpu->enmEffOpSize)
8680 {
8681 case IEMMODE_16BIT:
8682 switch (pIemCpu->enmEffAddrMode)
8683 {
8684 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
8685 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
8686 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
8687 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8688 }
8689 break;
 /* 64-bit operand size is handled like 32-bit (op32 workers). */
8690 case IEMMODE_64BIT:
8691 case IEMMODE_32BIT:
8692 switch (pIemCpu->enmEffAddrMode)
8693 {
8694 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
8695 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
8696 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
8697 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8698 }
8699 break;
 /* Macro below supplies the 'default:' label of the outer switch. */
8700 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8701 }
8702 }
8703 else
8704 {
8705 IEMOP_MNEMONIC("ins Yv,DX");
8706 switch (pIemCpu->enmEffOpSize)
8707 {
8708 case IEMMODE_16BIT:
8709 switch (pIemCpu->enmEffAddrMode)
8710 {
8711 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
8712 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
8713 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
8714 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8715 }
8716 break;
8717 case IEMMODE_64BIT:
8718 case IEMMODE_32BIT:
8719 switch (pIemCpu->enmEffAddrMode)
8720 {
8721 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
8722 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
8723 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
8724 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8725 }
8726 break;
8727 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8728 }
8729 }
8730}
8731
8732
8733/** Opcode 0x6e - OUTSB; deferred to C implementations (plain and REP variants). */
8734FNIEMOP_DEF(iemOp_outsb_Yb_DX)
8735{
8736 IEMOP_HLP_NO_LOCK_PREFIX();
 /* Either REP prefix selects the repeating variant.  The effective
    segment is passed along since OUTS honours segment overrides. */
8737 if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
8738 {
8739 IEMOP_MNEMONIC("rep outs DX,Yb");
8740 switch (pIemCpu->enmEffAddrMode)
8741 {
8742 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pIemCpu->iEffSeg, false);
8743 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pIemCpu->iEffSeg, false);
8744 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pIemCpu->iEffSeg, false);
8745 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8746 }
8747 }
8748 else
8749 {
8750 IEMOP_MNEMONIC("outs DX,Yb");
8751 switch (pIemCpu->enmEffAddrMode)
8752 {
8753 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pIemCpu->iEffSeg, false);
8754 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pIemCpu->iEffSeg, false);
8755 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pIemCpu->iEffSeg, false);
8756 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8757 }
8758 }
8759}
8760
8761
8762/** Opcode 0x6f - OUTSW/OUTSD; deferred to C implementations (plain and REP variants). */
8763FNIEMOP_DEF(iemOp_outswd_Yv_DX)
8764{
8765 IEMOP_HLP_NO_LOCK_PREFIX();
 /* Either REP prefix selects the repeating variant.  The effective
    segment is passed along since OUTS honours segment overrides. */
8766 if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
8767 {
8768 IEMOP_MNEMONIC("rep outs DX,Yv");
8769 switch (pIemCpu->enmEffOpSize)
8770 {
8771 case IEMMODE_16BIT:
8772 switch (pIemCpu->enmEffAddrMode)
8773 {
8774 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pIemCpu->iEffSeg, false);
8775 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pIemCpu->iEffSeg, false);
8776 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pIemCpu->iEffSeg, false);
8777 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8778 }
8779 break;
 /* 64-bit operand size is handled like 32-bit (op32 workers). */
8780 case IEMMODE_64BIT:
8781 case IEMMODE_32BIT:
8782 switch (pIemCpu->enmEffAddrMode)
8783 {
8784 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pIemCpu->iEffSeg, false);
8785 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pIemCpu->iEffSeg, false);
8786 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pIemCpu->iEffSeg, false);
8787 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8788 }
8789 break;
 /* Macro below supplies the 'default:' label of the outer switch. */
8790 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8791 }
8792 }
8793 else
8794 {
8795 IEMOP_MNEMONIC("outs DX,Yv");
8796 switch (pIemCpu->enmEffOpSize)
8797 {
8798 case IEMMODE_16BIT:
8799 switch (pIemCpu->enmEffAddrMode)
8800 {
8801 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pIemCpu->iEffSeg, false);
8802 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pIemCpu->iEffSeg, false);
8803 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pIemCpu->iEffSeg, false);
8804 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8805 }
8806 break;
8807 case IEMMODE_64BIT:
8808 case IEMMODE_32BIT:
8809 switch (pIemCpu->enmEffAddrMode)
8810 {
8811 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pIemCpu->iEffSeg, false);
8812 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pIemCpu->iEffSeg, false);
8813 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pIemCpu->iEffSeg, false);
8814 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8815 }
8816 break;
8817 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8818 }
8819 }
8820}
8821
8822
8823/** Opcode 0x70 - JO Jb: short jump if OF is set. */
8824FNIEMOP_DEF(iemOp_jo_Jb)
8825{
8826 IEMOP_MNEMONIC("jo Jb");
8827 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
8828 IEMOP_HLP_NO_LOCK_PREFIX();
 /* Near branches default to 64-bit operand size in long mode. */
8829 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8830
8831 IEM_MC_BEGIN(0, 0);
8832 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8833 IEM_MC_REL_JMP_S8(i8Imm);
8834 } IEM_MC_ELSE() {
8835 IEM_MC_ADVANCE_RIP();
8836 } IEM_MC_ENDIF();
8837 IEM_MC_END();
8838 return VINF_SUCCESS;
8839}
8840
8841
8842/** Opcode 0x71 - JNO Jb: short jump if OF is clear (branches inverted vs. JO). */
8843FNIEMOP_DEF(iemOp_jno_Jb)
8844{
8845 IEMOP_MNEMONIC("jno Jb");
8846 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
8847 IEMOP_HLP_NO_LOCK_PREFIX();
8848 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8849
8850 IEM_MC_BEGIN(0, 0);
 /* OF set => fall through; OF clear => jump. */
8851 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
8852 IEM_MC_ADVANCE_RIP();
8853 } IEM_MC_ELSE() {
8854 IEM_MC_REL_JMP_S8(i8Imm);
8855 } IEM_MC_ENDIF();
8856 IEM_MC_END();
8857 return VINF_SUCCESS;
8858}
8859
8860/** Opcode 0x72 - JC/JB/JNAE Jb: short jump if CF is set. */
8861FNIEMOP_DEF(iemOp_jc_Jb)
8862{
8863 IEMOP_MNEMONIC("jc/jnae Jb");
8864 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
8865 IEMOP_HLP_NO_LOCK_PREFIX();
8866 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8867
8868 IEM_MC_BEGIN(0, 0);
8869 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8870 IEM_MC_REL_JMP_S8(i8Imm);
8871 } IEM_MC_ELSE() {
8872 IEM_MC_ADVANCE_RIP();
8873 } IEM_MC_ENDIF();
8874 IEM_MC_END();
8875 return VINF_SUCCESS;
8876}
8877
8878
8879/** Opcode 0x73 - JNC/JNB/JAE Jb: short jump if CF is clear. */
8880FNIEMOP_DEF(iemOp_jnc_Jb)
8881{
8882 IEMOP_MNEMONIC("jnc/jnb Jb");
8883 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
8884 IEMOP_HLP_NO_LOCK_PREFIX();
8885 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8886
8887 IEM_MC_BEGIN(0, 0);
 /* CF set => fall through; CF clear => jump. */
8888 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
8889 IEM_MC_ADVANCE_RIP();
8890 } IEM_MC_ELSE() {
8891 IEM_MC_REL_JMP_S8(i8Imm);
8892 } IEM_MC_ENDIF();
8893 IEM_MC_END();
8894 return VINF_SUCCESS;
8895}
8896
8897
8898/** Opcode 0x74 - JE/JZ Jb: short jump if ZF is set. */
8899FNIEMOP_DEF(iemOp_je_Jb)
8900{
8901 IEMOP_MNEMONIC("je/jz Jb");
8902 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
8903 IEMOP_HLP_NO_LOCK_PREFIX();
8904 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8905
8906 IEM_MC_BEGIN(0, 0);
8907 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8908 IEM_MC_REL_JMP_S8(i8Imm);
8909 } IEM_MC_ELSE() {
8910 IEM_MC_ADVANCE_RIP();
8911 } IEM_MC_ENDIF();
8912 IEM_MC_END();
8913 return VINF_SUCCESS;
8914}
8915
8916
8917/** Opcode 0x75 - JNE/JNZ Jb: short jump if ZF is clear. */
8918FNIEMOP_DEF(iemOp_jne_Jb)
8919{
8920 IEMOP_MNEMONIC("jne/jnz Jb");
8921 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
8922 IEMOP_HLP_NO_LOCK_PREFIX();
8923 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8924
8925 IEM_MC_BEGIN(0, 0);
 /* ZF set => fall through; ZF clear => jump. */
8926 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
8927 IEM_MC_ADVANCE_RIP();
8928 } IEM_MC_ELSE() {
8929 IEM_MC_REL_JMP_S8(i8Imm);
8930 } IEM_MC_ENDIF();
8931 IEM_MC_END();
8932 return VINF_SUCCESS;
8933}
8934
8935
8936/** Opcode 0x76 - JBE/JNA Jb: short jump if CF or ZF is set. */
8937FNIEMOP_DEF(iemOp_jbe_Jb)
8938{
8939 IEMOP_MNEMONIC("jbe/jna Jb");
8940 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
8941 IEMOP_HLP_NO_LOCK_PREFIX();
8942 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8943
8944 IEM_MC_BEGIN(0, 0);
8945 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8946 IEM_MC_REL_JMP_S8(i8Imm);
8947 } IEM_MC_ELSE() {
8948 IEM_MC_ADVANCE_RIP();
8949 } IEM_MC_ENDIF();
8950 IEM_MC_END();
8951 return VINF_SUCCESS;
8952}
8953
8954
8955/** Opcode 0x77 - JNBE/JA Jb: short jump if both CF and ZF are clear. */
8956FNIEMOP_DEF(iemOp_jnbe_Jb)
8957{
8958 IEMOP_MNEMONIC("jnbe/ja Jb");
8959 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
8960 IEMOP_HLP_NO_LOCK_PREFIX();
8961 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8962
8963 IEM_MC_BEGIN(0, 0);
 /* CF or ZF set => fall through; both clear => jump. */
8964 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
8965 IEM_MC_ADVANCE_RIP();
8966 } IEM_MC_ELSE() {
8967 IEM_MC_REL_JMP_S8(i8Imm);
8968 } IEM_MC_ENDIF();
8969 IEM_MC_END();
8970 return VINF_SUCCESS;
8971}
8972
8973
8974/** Opcode 0x78 - JS Jb: short jump if SF is set. */
8975FNIEMOP_DEF(iemOp_js_Jb)
8976{
8977 IEMOP_MNEMONIC("js Jb");
8978 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
8979 IEMOP_HLP_NO_LOCK_PREFIX();
8980 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8981
8982 IEM_MC_BEGIN(0, 0);
8983 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
8984 IEM_MC_REL_JMP_S8(i8Imm);
8985 } IEM_MC_ELSE() {
8986 IEM_MC_ADVANCE_RIP();
8987 } IEM_MC_ENDIF();
8988 IEM_MC_END();
8989 return VINF_SUCCESS;
8990}
8991
8992
8993/** Opcode 0x79 - JNS Jb: short jump if SF is clear. */
8994FNIEMOP_DEF(iemOp_jns_Jb)
8995{
8996 IEMOP_MNEMONIC("jns Jb");
8997 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
8998 IEMOP_HLP_NO_LOCK_PREFIX();
8999 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9000
9001 IEM_MC_BEGIN(0, 0);
 /* SF set => fall through; SF clear => jump. */
9002 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
9003 IEM_MC_ADVANCE_RIP();
9004 } IEM_MC_ELSE() {
9005 IEM_MC_REL_JMP_S8(i8Imm);
9006 } IEM_MC_ENDIF();
9007 IEM_MC_END();
9008 return VINF_SUCCESS;
9009}
9010
9011
9012/** Opcode 0x7a - JP/JPE Jb: short jump if PF is set. */
9013FNIEMOP_DEF(iemOp_jp_Jb)
9014{
9015 IEMOP_MNEMONIC("jp Jb");
9016 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9017 IEMOP_HLP_NO_LOCK_PREFIX();
9018 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9019
9020 IEM_MC_BEGIN(0, 0);
9021 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
9022 IEM_MC_REL_JMP_S8(i8Imm);
9023 } IEM_MC_ELSE() {
9024 IEM_MC_ADVANCE_RIP();
9025 } IEM_MC_ENDIF();
9026 IEM_MC_END();
9027 return VINF_SUCCESS;
9028}
9029
9030
9031/** Opcode 0x7b - JNP/JPO Jb: short jump if PF is clear. */
9032FNIEMOP_DEF(iemOp_jnp_Jb)
9033{
9034 IEMOP_MNEMONIC("jnp Jb");
9035 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9036 IEMOP_HLP_NO_LOCK_PREFIX();
9037 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9038
9039 IEM_MC_BEGIN(0, 0);
 /* PF set => fall through; PF clear => jump. */
9040 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
9041 IEM_MC_ADVANCE_RIP();
9042 } IEM_MC_ELSE() {
9043 IEM_MC_REL_JMP_S8(i8Imm);
9044 } IEM_MC_ENDIF();
9045 IEM_MC_END();
9046 return VINF_SUCCESS;
9047}
9048
9049
/**
 * Opcode 0x7c - JL/JNGE Jb: jump short if less (SF != OF).
 */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC("jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();                     /* LOCK prefix on Jcc is invalid */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);   /* taken: SF != OF (signed less-than) */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();       /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9067
9068
/**
 * Opcode 0x7d - JNL/JGE Jb: jump short if not less (SF == OF).
 */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC("jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();                     /* LOCK prefix on Jcc is invalid */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    /* Condition tested inverted: SF != OF means less, i.e. not taken. */
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();       /* not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);   /* taken: SF == OF (signed greater-or-equal) */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9086
9087
/**
 * Opcode 0x7e - JLE/JNG Jb: jump short if less-or-equal (ZF=1 or SF != OF).
 */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC("jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();                     /* LOCK prefix on Jcc is invalid */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);   /* taken: ZF=1 or SF != OF (signed less-or-equal) */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();       /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9105
9106
/**
 * Opcode 0x7f - JNLE/JG Jb: jump short if greater (ZF=0 and SF == OF).
 */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC("jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* signed 8-bit displacement */
    IEMOP_HLP_NO_LOCK_PREFIX();                     /* LOCK prefix on Jcc is invalid */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();              /* Jcc defaults to 64-bit operand size in long mode */

    IEM_MC_BEGIN(0, 0);
    /* Condition tested inverted: ZF=1 or SF != OF means less-or-equal, i.e. not taken. */
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();       /* not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);   /* taken: ZF=0 and SF == OF (signed greater) */
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9124
9125
/**
 * Opcode 0x80 - Group 1: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP Eb,Ib.
 *
 * The actual operation is selected by ModRM.reg via the g_apIemImplGrp1
 * function-pointer table; the packed mnemonic string below is indexed the
 * same way (each entry occupies 4 bytes including terminators).
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Eb,Ib");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();     /* LOCK with a register destination -> #UD */
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        uint32_t fAccess;
        if (pImpl->pfnLockedU8)
            fAccess = IEM_ACCESS_DATA_RW;
        else
        { /* CMP - read-only destination, so a LOCK prefix is invalid. */
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* cbImm=1: one immediate byte still follows the ModRM/SIB/disp bytes,
           which matters for RIP-relative addressing; the immediate is
           therefore fetched after the effective address calculation. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);

        IEM_MC_MEM_MAP(pu8Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
9184
9185
/**
 * Opcode 0x81 - Group 1: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP Ev,Iz.
 *
 * Operand-size sized immediate; in 64-bit operand size the immediate is a
 * sign-extended 32-bit value (see IEM_OPCODE_GET_NEXT_S32_SX_U64).  The
 * operation is selected by ModRM.reg via g_apIemImplGrp1.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Packed mnemonic table indexed by ModRM.reg, 4 bytes per entry. */
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Ev,Iz");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    /* NOTE(review): no IEM_NOT_REACHED_DEFAULT_CASE_RET() here unlike
       iemOp_xchg_Ev_Gv; presumably enmEffOpSize can only hold the three
       handled values - confirm. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_NO_LOCK_PREFIX();     /* LOCK with register destination -> #UD */
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU16)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                { /* CMP, TEST - read-only destination, LOCK prefix invalid. */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=2: a word immediate follows the addressing bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_NO_LOCK_PREFIX();     /* LOCK with register destination -> #UD */
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                /* 32-bit register writes zero the upper half in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU32)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                { /* CMP, TEST - read-only destination, LOCK prefix invalid. */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=4: a dword immediate follows the addressing bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                /* Iz in 64-bit operand size: 32-bit immediate sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_NO_LOCK_PREFIX();     /* LOCK with register destination -> #UD */
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU64)
                    fAccess = IEM_ACCESS_DATA_RW;
                else
                { /* CMP - read-only destination, LOCK prefix invalid. */
                    IEMOP_HLP_NO_LOCK_PREFIX();
                    fAccess = IEM_ACCESS_DATA_R;
                }
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=4: the immediate stays 4 bytes even with 64-bit operand size. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }
    }
    return VINF_SUCCESS;
}
9360
9361
/**
 * Opcode 0x82 - undocumented alias of opcode 0x80 (Group 1 Eb,Ib);
 * invalid in 64-bit mode, otherwise forwarded to the 0x80 handler.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
9368
9369
/**
 * Opcode 0x83 - Group 1: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP Ev,Ib.
 *
 * The 8-bit immediate is sign-extended to the effective operand size.
 * The operation is selected by ModRM.reg via g_apIemImplGrp1.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Packed mnemonic table indexed by ModRM.reg, 4 bytes per entry. */
    IEMOP_MNEMONIC2("add\0or\0\0adc\0sbb\0and\0sub\0xor\0cmp" + ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)*4, "Ev,Ib");
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register target
         */
        IEMOP_HLP_NO_LOCK_PREFIX();     /* LOCK with register destination -> #UD */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                /* (int8_t) cast sign-extends the immediate to 16 bits. */
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                /* (int8_t) cast sign-extends the immediate to 32 bits. */
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                /* 32-bit register writes zero the upper half in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                /* (int8_t) cast sign-extends the immediate to 64 bits. */
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    else
    {
        /*
         * Memory target.
         */
        /* pfnLockedU16 is checked as a proxy for all sizes: the locked
           variants are either all present or all absent (CMP has none). */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else
        { /* CMP - read-only destination, LOCK prefix invalid. */
            IEMOP_HLP_NO_LOCK_PREFIX();
            fAccess = IEM_ACCESS_DATA_R;
        }

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=1: one immediate byte follows the addressing bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    return VINF_SUCCESS;
}
9530
9531
/**
 * Opcode 0x84 - TEST Eb,Gb (AND without writing the result; flags only).
 * Shares the generic byte-sized rm,r8 binary-operator decoder.
 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC("test Eb,Gb");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);    /* AF is undefined after TEST */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
}
9540
9541
/**
 * Opcode 0x85 - TEST Ev,Gv (AND without writing the result; flags only).
 * Shares the generic operand-sized rm,rv binary-operator decoder.
 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC("test Ev,Gv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo do we have to decode the whole instruction first? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);    /* AF is undefined after TEST */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
}
9550
9551
/**
 * Opcode 0x86 - XCHG Eb,Gb.
 *
 * Register form swaps two byte registers; memory form swaps a byte register
 * with memory via the xchg assembly helper.  Note that the memory form does
 * not reject the LOCK prefix (no IEMOP_HLP_NO_LOCK_PREFIX in that path).
 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC("xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();     /* LOCK with register operands -> #UD */

        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        /* Fetch both, then store crosswise. */
        IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
/** @todo the register must be committed separately! */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Mem, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
9599
9600
/**
 * Opcode 0x87 - XCHG Ev,Gv.
 *
 * Register form swaps two general registers; memory form swaps a register
 * with memory via the xchg assembly helpers.  As with 0x86, the memory path
 * does not reject the LOCK prefix.
 */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC("xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_NO_LOCK_PREFIX();     /* LOCK with register operands -> #UD */

        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                /* Fetch both, then store crosswise. */
                IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                /* 32-bit stores implicitly zero the high halves of both registers. */
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, uTmp1);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
/** @todo the register must be committed separately! */
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint16_t *, pu16Mem, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint32_t *, pu32Mem, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

                /* The register was written by reference; zero its high half explicitly. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pu64Mem, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9722
9723
/**
 * Opcode 0x88 - MOV Eb,Gb: store a byte register to r/m.
 */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC("mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register destination: plain reg-to-reg byte copy */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);   /* cbImm=0: no trailing immediate */
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
        IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
9762
9763
/**
 * Opcode 0x89 - MOV Ev,Gv: store a general register to r/m,
 * sized by the effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC("mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);   /* cbImm=0: no trailing immediate */
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
9850
9851
/**
 * Opcode 0x8a - MOV Gb,Eb: load a byte register from r/m.
 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC("mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register source: plain reg-to-reg byte copy */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);   /* cbImm=0: no trailing immediate */
        IEM_MC_FETCH_MEM_U8(u8Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
9888
9889
/**
 * Opcode 0x8b - MOV Gv,Ev: load a general register from r/m,
 * sized by the effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC("mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);   /* cbImm=0: no trailing immediate */
                IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
9976
9977
/**
 * Opcode 0x63 - mode-dependent dispatcher:
 *  - outside 64-bit mode: ARPL Ew,Gw;
 *  - in 64-bit mode with a non-64-bit operand size: plain MOV Gv,Ev;
 *  - in 64-bit mode with 64-bit operand size: MOVSXD Gv,Ev.
 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
{
    if (pIemCpu->enmCpuMode != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
    if (pIemCpu->enmEffOpSize != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
    return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
}
9987
9988
/**
 * Opcode 0x8c - MOV Ev,Sw: store a segment selector to r/m.
 *
 * Register destinations honour the operand size (zero-extending for 32/64);
 * memory destinations are always written as a 16-bit word.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC("mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);    /* zero-extended selector */
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);    /* zero-extended selector */
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're saving the register to memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);   /* cbImm=0: no trailing immediate */
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10061
10062
10063
10064
/** Opcode 0x8d. - lea Gv,M: store the effective address itself in a GPR;
 * the register form of the ModR/M byte is invalid. */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC("lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */

    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc); /* truncate the address to the 16-bit operand size */
            IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc); /* truncate the address to the 32-bit operand size */
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_7); /* all valid operand sizes handled above */
}
10109
10110
/** Opcode 0x8e. - mov Sw,Ev: load a segment register from a GPR or memory.
 * Defers the actual selector load (descriptor checks etc.) to iemCImpl_load_SReg. */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC("mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pIemCpu->enmEffOpSize = pIemCpu->enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.  (CS cannot be loaded
     * with mov.)
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10164
10165
/** Opcode 0x8f /0. - pop Ev: pop from the stack into a GPR or memory operand. */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC("pop Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /* Register access is relatively easy and can share code. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP.  First decode pass; the
       opcode offset is saved so the R/M bytes can be re-decoded below. */
    uint8_t const   offOpcodeSaved = pIemCpu->offOpcode;
    RTGCPTR         GCPtrEff;
    VBOXSTRICTRC    rcStrict;
    rcStrict = iemOpHlpCalcRmEffAddr(pIemCpu, bRm, 0, &GCPtrEff);
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    pIemCpu->offOpcode = offOpcodeSaved;

    /* Second pass: temporarily advance RSP by the operand size (as Intel
       documents) and recalculate the effective address, then restore RSP. */
    PCPUMCTX        pCtx = pIemCpu->CTX_SUFF(pCtx);
    uint64_t const  RspSaved = pCtx->rsp;
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT: iemRegAddToRsp(pIemCpu, pCtx, 2); break;
        case IEMMODE_32BIT: iemRegAddToRsp(pIemCpu, pCtx, 4); break;
        case IEMMODE_64BIT: iemRegAddToRsp(pIemCpu, pCtx, 8); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    rcStrict = iemOpHlpCalcRmEffAddr(pIemCpu, bRm, 0, &GCPtrEff);
    Assert(rcStrict == VINF_SUCCESS); /* first pass succeeded, so this must too */
    pCtx->rsp = RspSaved;

    /* Perform the operation - this should be CImpl.  Pop into a temporary,
       then store it at the effective address; RSP is only committed on
       full success. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pCtx->rsp;
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pIemCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pIemCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pIemCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pIemCpu, pIemCpu->iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        /* Both the pop and the memory store succeeded: commit RSP and advance RIP. */
        pCtx->rsp = TmpRsp.u;
        iemRegUpdateRipAndClearRF(pIemCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2; /* the MC checker build cannot exercise this path */
#endif
}
10267
10268
10269/** Opcode 0x8f. */
10270FNIEMOP_DEF(iemOp_Grp1A)
10271{
10272 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10273 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
10274 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
10275
10276 /* AMD has defined /1 thru /7 as XOP prefix (similar to three byte VEX). */
10277 /** @todo XOP decoding. */
10278 IEMOP_MNEMONIC("3-byte-xop");
10279 return IEMOP_RAISE_INVALID_OPCODE();
10280}
10281
10282
/**
 * Common 'xchg reg,rAX' helper.
 *
 * Exchanges the given general register (iReg, extended by REX.B) with
 * AX/EAX/RAX according to the effective operand size.  Uses two temporaries
 * so both reads happen before either write.
 */
FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    iReg |= pIemCpu->uRexB; /* fold in the REX.B register extension */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Tmp1);
            IEM_MC_LOCAL(uint16_t, u16Tmp2);
            IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
            IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
            IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Tmp1);
            IEM_MC_LOCAL(uint32_t, u32Tmp2);
            IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
            IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
            IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Tmp1);
            IEM_MC_LOCAL(uint64_t, u64Tmp2);
            IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
            IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
            IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10332
10333
10334/** Opcode 0x90. */
10335FNIEMOP_DEF(iemOp_nop)
10336{
10337 /* R8/R8D and RAX/EAX can be exchanged. */
10338 if (pIemCpu->fPrefixes & IEM_OP_PRF_REX_B)
10339 {
10340 IEMOP_MNEMONIC("xchg r8,rAX");
10341 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
10342 }
10343
10344 if (pIemCpu->fPrefixes & IEM_OP_PRF_LOCK)
10345 IEMOP_MNEMONIC("pause");
10346 else
10347 IEMOP_MNEMONIC("nop");
10348 IEM_MC_BEGIN(0, 0);
10349 IEM_MC_ADVANCE_RIP();
10350 IEM_MC_END();
10351 return VINF_SUCCESS;
10352}
10353
10354
/** Opcode 0x91. - xchg rCX,rAX; operand size selects the 16/32/64-bit exchange. */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC("xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
10361
10362
/** Opcode 0x92. - xchg rDX,rAX; operand size selects the 16/32/64-bit exchange. */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC("xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
10369
10370
/** Opcode 0x93. - xchg rBX,rAX; operand size selects the 16/32/64-bit exchange. */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC("xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
10377
10378
10379/** Opcode 0x94. */
10380FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
10381{
10382 IEMOP_MNEMONIC("xchg rSX,rAX");
10383 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
10384}
10385
10386
/** Opcode 0x95. - xchg rBP,rAX; operand size selects the 16/32/64-bit exchange. */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC("xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
10393
10394
/** Opcode 0x96. - xchg rSI,rAX; operand size selects the 16/32/64-bit exchange. */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC("xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
10401
10402
/** Opcode 0x97. - xchg rDI,rAX; operand size selects the 16/32/64-bit exchange. */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC("xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
10409
10410
/** Opcode 0x98. - cbw/cwde/cdqe: sign extend AL->AX, AX->EAX or EAX->RAX
 * depending on the effective operand size.  Implemented by testing the
 * source's sign bit and either OR-ing in the high mask or AND-ing it off. */
FNIEMOP_DEF(iemOp_cbw)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC("cbw");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) { /* sign bit of AL */
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC("cwde");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) { /* sign bit of AX */
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC("cdqe");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) { /* sign bit of EAX */
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10456
10457
/** Opcode 0x99. - cwd/cdq/cqo: sign extend rAX into rDX by replicating the
 * sign bit of AX/EAX/RAX into all bits of DX/EDX/RDX. */
FNIEMOP_DEF(iemOp_cwd)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC("cwd");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) { /* sign bit of AX */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC("cdq");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) { /* sign bit of EAX */
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC("cqo");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) { /* sign bit of RAX */
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10503
10504
/** Opcode 0x9a. - call Ap: direct far call with an immediate seg:offset
 * pointer.  Invalid in 64-bit mode. */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC("call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pIemCpu->enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg); /* 32-bit offset */
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg); /* 16-bit offset, zero extended */
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel); /* the selector follows the offset */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pIemCpu->enmEffOpSize);
}
10521
10522
/** Opcode 0x9b. (aka fwait) - checks for pending x87 exceptions and CR0
 * based device-not-available conditions, otherwise a no-op. */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC("wait");
    IEMOP_HLP_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10536
10537
/** Opcode 0x9c. - pushf/pushfd/pushfq: defers to the C implementation,
 * which needs the effective operand size.  Defaults to 64-bit operand
 * size in long mode. */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pIemCpu->enmEffOpSize);
}
10545
10546
/** Opcode 0x9d. - popf/popfd/popfq: defers to the C implementation,
 * which needs the effective operand size.  Defaults to 64-bit operand
 * size in long mode. */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pIemCpu->enmEffOpSize);
}
10554
10555
/** Opcode 0x9e. - sahf: load SF, ZF, AF, PF and CF from AH into EFLAGS.
 * In 64-bit mode this is only valid when CPUID reports LAHF/SAHF support. */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC("sahf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/); /* index 4 addresses AH without a REX prefix */
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF); /* only these five come from AH */
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00)); /* keep the upper 24 bits of the current EFLAGS */
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1); /* bit 1 of EFLAGS is always set */
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10578
10579
/** Opcode 0x9f. - lahf: load the low EFLAGS byte into AH.
 * In 64-bit mode this is only valid when CPUID reports LAHF/SAHF support. */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC("lahf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pIemCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags); /* index 4 addresses AH without a REX prefix */
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10596
10597
/**
 * Macro used by iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend of lock
 * prefixes. Will return on failures.
 *
 * The width of the fetched offset follows the effective address size mode
 * (16, 32 or 64 bits) and is zero extended to 64 bits.
 *
 * @param a_GCPtrMemOff The variable to store the offset in.
 */
#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
    do \
    { \
        switch (pIemCpu->enmEffAddrMode) \
        { \
            case IEMMODE_16BIT: \
                IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_32BIT: \
                IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_64BIT: \
                IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
                break; \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
        IEMOP_HLP_NO_LOCK_PREFIX(); \
    } while (0)
10622
/** Opcode 0xa0. - mov AL,Ob: load AL from an absolute moffs address.
 * NOTE(review): no IEMOP_MNEMONIC here, unlike the sibling 0xa1 handler -
 * presumably an omission; confirm before adding since the macro may have
 * logging/stats side effects. */
FNIEMOP_DEF(iemOp_mov_Al_Ob)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch AL.
     */
    IEM_MC_BEGIN(0,1);
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_FETCH_MEM_U8(u8Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
    IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10643
10644
/** Opcode 0xa1. - mov rAX,Ov: load AX/EAX/RAX from an absolute moffs address;
 * the operand size selects the access width. */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    IEMOP_MNEMONIC("mov rAX,Ov");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch rAX.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10690
10691
/** Opcode 0xa2. - mov Ob,AL: store AL at an absolute moffs address. */
FNIEMOP_DEF(iemOp_mov_Ob_AL)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Store AL.
     */
    IEM_MC_BEGIN(0,1);
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
    IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrMemOff, u8Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10712
10713
/** Opcode 0xa3. - mov Ov,rAX: store AX/EAX/RAX at an absolute moffs address;
 * the operand size selects the access width. */
FNIEMOP_DEF(iemOp_mov_Ov_rAX)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Store rAX.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrMemOff, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrMemOff, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrMemOff, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10758
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 * Loads ValBits from DS(override):[xSI], stores them at ES:[xDI], then
 * advances or rewinds both index registers by the element size according
 * to EFLAGS.DF. */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
10777
/** Opcode 0xa4. - movsb: byte string move.  With a REP prefix the whole
 * loop is deferred to a C implementation picked by the address size. */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * (F2 and F3 behave the same for movs.)
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep movsb Xb,Yb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("movsb Xb,Yb");

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
10811
10812
/** Opcode 0xa5. - movsw/movsd/movsq: word/dword/qword string move.  With a
 * REP prefix the loop is deferred to a C implementation selected by both
 * the operand size and the address size. */
FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * (F2 and F3 behave the same for movs.)
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep movs Xv,Yv");
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pIemCpu->iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break: unreachable, every path above returns */
            case IEMMODE_64BIT:
                switch (pIemCpu->enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /* 64-bit op size with 16-bit addressing cannot be encoded */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pIemCpu->iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pIemCpu->iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("movs Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with movsb.
     */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pIemCpu->enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
10895
10896#undef IEM_MOVS_CASE
10897
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 * Loads ValBits from DS(override):[xSI] and ES:[xDI], compares them via
 * iemAImpl_cmp_uNN to update EFLAGS, then advances or rewinds both index
 * registers by the element size according to EFLAGS.DF. */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 3); \
        IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
        IEM_MC_ARG(uint##ValBits##_t,   uValue2,  1); \
        IEM_MC_ARG(uint32_t *,          pEFlags,  2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pIemCpu->iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
        IEM_MC_REF_LOCAL(puValue1, uValue1); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \
10924
10925/** Opcode 0xa6. */
10926FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
10927{
10928 IEMOP_HLP_NO_LOCK_PREFIX();
10929
10930 /*
10931 * Use the C implementation if a repeat prefix is encountered.
10932 */
10933 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
10934 {
10935 IEMOP_MNEMONIC("repe cmps Xb,Yb");
10936 switch (pIemCpu->enmEffAddrMode)
10937 {
10938 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pIemCpu->iEffSeg);
10939 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pIemCpu->iEffSeg);
10940 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pIemCpu->iEffSeg);
10941 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10942 }
10943 }
10944 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
10945 {
10946 IEMOP_MNEMONIC("repe cmps Xb,Yb");
10947 switch (pIemCpu->enmEffAddrMode)
10948 {
10949 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pIemCpu->iEffSeg);
10950 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pIemCpu->iEffSeg);
10951 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pIemCpu->iEffSeg);
10952 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10953 }
10954 }
10955 IEMOP_MNEMONIC("cmps Xb,Yb");
10956
10957 /*
10958 * Sharing case implementation with cmps[wdq] below.
10959 */
10960 switch (pIemCpu->enmEffAddrMode)
10961 {
10962 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
10963 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
10964 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
10965 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10966 }
10967 return VINF_SUCCESS;
10968
10969}
10970
10971
10972/** Opcode 0xa7. */
10973FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
10974{
10975 IEMOP_HLP_NO_LOCK_PREFIX();
10976
10977 /*
10978 * Use the C implementation if a repeat prefix is encountered.
10979 */
10980 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
10981 {
10982 IEMOP_MNEMONIC("repe cmps Xv,Yv");
10983 switch (pIemCpu->enmEffOpSize)
10984 {
10985 case IEMMODE_16BIT:
10986 switch (pIemCpu->enmEffAddrMode)
10987 {
10988 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pIemCpu->iEffSeg);
10989 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pIemCpu->iEffSeg);
10990 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pIemCpu->iEffSeg);
10991 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10992 }
10993 break;
10994 case IEMMODE_32BIT:
10995 switch (pIemCpu->enmEffAddrMode)
10996 {
10997 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pIemCpu->iEffSeg);
10998 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pIemCpu->iEffSeg);
10999 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pIemCpu->iEffSeg);
11000 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11001 }
11002 case IEMMODE_64BIT:
11003 switch (pIemCpu->enmEffAddrMode)
11004 {
11005 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
11006 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pIemCpu->iEffSeg);
11007 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pIemCpu->iEffSeg);
11008 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11009 }
11010 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11011 }
11012 }
11013
11014 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
11015 {
11016 IEMOP_MNEMONIC("repne cmps Xv,Yv");
11017 switch (pIemCpu->enmEffOpSize)
11018 {
11019 case IEMMODE_16BIT:
11020 switch (pIemCpu->enmEffAddrMode)
11021 {
11022 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pIemCpu->iEffSeg);
11023 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pIemCpu->iEffSeg);
11024 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pIemCpu->iEffSeg);
11025 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11026 }
11027 break;
11028 case IEMMODE_32BIT:
11029 switch (pIemCpu->enmEffAddrMode)
11030 {
11031 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pIemCpu->iEffSeg);
11032 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pIemCpu->iEffSeg);
11033 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pIemCpu->iEffSeg);
11034 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11035 }
11036 case IEMMODE_64BIT:
11037 switch (pIemCpu->enmEffAddrMode)
11038 {
11039 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
11040 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pIemCpu->iEffSeg);
11041 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pIemCpu->iEffSeg);
11042 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11043 }
11044 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11045 }
11046 }
11047
11048 IEMOP_MNEMONIC("cmps Xv,Yv");
11049
11050 /*
11051 * Annoying double switch here.
11052 * Using ugly macro for implementing the cases, sharing it with cmpsb.
11053 */
11054 switch (pIemCpu->enmEffOpSize)
11055 {
11056 case IEMMODE_16BIT:
11057 switch (pIemCpu->enmEffAddrMode)
11058 {
11059 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
11060 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
11061 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
11062 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11063 }
11064 break;
11065
11066 case IEMMODE_32BIT:
11067 switch (pIemCpu->enmEffAddrMode)
11068 {
11069 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
11070 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
11071 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
11072 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11073 }
11074 break;
11075
11076 case IEMMODE_64BIT:
11077 switch (pIemCpu->enmEffAddrMode)
11078 {
11079 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
11080 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
11081 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
11082 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11083 }
11084 break;
11085 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11086 }
11087 return VINF_SUCCESS;
11088
11089}
11090
11091#undef IEM_CMPS_CASE
11092
/** Opcode 0xa8. */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC("test al,Ib");
    /* AF is architecturally undefined after TEST; tell the verifier not to compare it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    /* Shares the generic AL,imm8 binary-op decoder with the other 0x?4 opcodes. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
}
11100
11101
/** Opcode 0xa9. */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC("test rAX,Iz");
    /* AF is architecturally undefined after TEST; tell the verifier not to compare it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    /* Shares the generic rAX,immZ binary-op decoder; operand size picks AX/EAX/RAX. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
}
11109
11110
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX */
/** Emits the non-rep STOS body: store (r/e)AX (low ValBits) to ES:xDI, then
 *  advance or retreat xDI by ValBits/8 according to EFLAGS.DF.
 *  ValBits is the operand width (8/16/32/64), AddrBits the effective address
 *  width (16/32/64).  The address is zero-extended to 64 bits for the store. */
#define IEM_STOS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END(); \
11126
/** Opcode 0xaa. */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * For STOS, F2 (REPNZ) behaves like F3 (REP), hence the combined mask.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep stos Yb,al");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("stos Yb,al");

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11160
11161
11162/** Opcode 0xab. */
11163FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
11164{
11165 IEMOP_HLP_NO_LOCK_PREFIX();
11166
11167 /*
11168 * Use the C implementation if a repeat prefix is encountered.
11169 */
11170 if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
11171 {
11172 IEMOP_MNEMONIC("rep stos Yv,rAX");
11173 switch (pIemCpu->enmEffOpSize)
11174 {
11175 case IEMMODE_16BIT:
11176 switch (pIemCpu->enmEffAddrMode)
11177 {
11178 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
11179 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
11180 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
11181 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11182 }
11183 break;
11184 case IEMMODE_32BIT:
11185 switch (pIemCpu->enmEffAddrMode)
11186 {
11187 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
11188 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
11189 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
11190 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11191 }
11192 case IEMMODE_64BIT:
11193 switch (pIemCpu->enmEffAddrMode)
11194 {
11195 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
11196 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
11197 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
11198 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11199 }
11200 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11201 }
11202 }
11203 IEMOP_MNEMONIC("stos Yv,rAX");
11204
11205 /*
11206 * Annoying double switch here.
11207 * Using ugly macro for implementing the cases, sharing it with stosb.
11208 */
11209 switch (pIemCpu->enmEffOpSize)
11210 {
11211 case IEMMODE_16BIT:
11212 switch (pIemCpu->enmEffAddrMode)
11213 {
11214 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
11215 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
11216 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
11217 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11218 }
11219 break;
11220
11221 case IEMMODE_32BIT:
11222 switch (pIemCpu->enmEffAddrMode)
11223 {
11224 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
11225 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
11226 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
11227 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11228 }
11229 break;
11230
11231 case IEMMODE_64BIT:
11232 switch (pIemCpu->enmEffAddrMode)
11233 {
11234 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
11235 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
11236 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
11237 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11238 }
11239 break;
11240 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11241 }
11242 return VINF_SUCCESS;
11243}
11244
11245#undef IEM_STOS_CASE
11246
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv */
/** Emits the non-rep LODS body: load ValBits bits from iEffSeg:xSI into
 *  (r/e)AX, then advance or retreat xSI by ValBits/8 according to EFLAGS.DF.
 *  ValBits is the operand width (8/16/32/64), AddrBits the effective address
 *  width (16/32/64).  The address is zero-extended to 64 bits for the fetch. */
#define IEM_LODS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pIemCpu->iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
11262
/** Opcode 0xac. */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * For LODS, F2 (REPNZ) behaves like F3 (REP), hence the combined mask.
     */
    if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC("rep lodsb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pIemCpu->iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pIemCpu->iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pIemCpu->iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("lodsb al,Xb");

    /*
     * Sharing case implementation with lods[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11296
11297
11298/** Opcode 0xad. */
11299FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
11300{
11301 IEMOP_HLP_NO_LOCK_PREFIX();
11302
11303 /*
11304 * Use the C implementation if a repeat prefix is encountered.
11305 */
11306 if (pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
11307 {
11308 IEMOP_MNEMONIC("rep lods rAX,Xv");
11309 switch (pIemCpu->enmEffOpSize)
11310 {
11311 case IEMMODE_16BIT:
11312 switch (pIemCpu->enmEffAddrMode)
11313 {
11314 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pIemCpu->iEffSeg);
11315 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pIemCpu->iEffSeg);
11316 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pIemCpu->iEffSeg);
11317 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11318 }
11319 break;
11320 case IEMMODE_32BIT:
11321 switch (pIemCpu->enmEffAddrMode)
11322 {
11323 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pIemCpu->iEffSeg);
11324 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pIemCpu->iEffSeg);
11325 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pIemCpu->iEffSeg);
11326 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11327 }
11328 case IEMMODE_64BIT:
11329 switch (pIemCpu->enmEffAddrMode)
11330 {
11331 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
11332 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pIemCpu->iEffSeg);
11333 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pIemCpu->iEffSeg);
11334 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11335 }
11336 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11337 }
11338 }
11339 IEMOP_MNEMONIC("lods rAX,Xv");
11340
11341 /*
11342 * Annoying double switch here.
11343 * Using ugly macro for implementing the cases, sharing it with lodsb.
11344 */
11345 switch (pIemCpu->enmEffOpSize)
11346 {
11347 case IEMMODE_16BIT:
11348 switch (pIemCpu->enmEffAddrMode)
11349 {
11350 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
11351 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
11352 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
11353 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11354 }
11355 break;
11356
11357 case IEMMODE_32BIT:
11358 switch (pIemCpu->enmEffAddrMode)
11359 {
11360 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
11361 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
11362 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
11363 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11364 }
11365 break;
11366
11367 case IEMMODE_64BIT:
11368 switch (pIemCpu->enmEffAddrMode)
11369 {
11370 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
11371 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
11372 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
11373 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11374 }
11375 break;
11376 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11377 }
11378 return VINF_SUCCESS;
11379}
11380
11381#undef IEM_LODS_CASE
11382
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv */
/** Emits the non-rep SCAS body: fetch ValBits bits from ES:xDI, compare them
 *  against (r/e)AX via iemAImpl_cmp_u<ValBits> (updates EFLAGS only; rAX is
 *  passed by reference but CMP does not write it), then advance or retreat
 *  xDI by ValBits/8 according to EFLAGS.DF. */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 2); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
11404
/** Opcode 0xae. */
FNIEMOP_DEF(iemOp_scasb_AL_Xb)
{
    IEMOP_HLP_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * Unlike STOS/LODS, SCAS distinguishes REPE (F3) and REPNE (F2).
     */
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC("repe scasb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC("repne scasb al,Xb");
        switch (pIemCpu->enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC("scasb al,Xb");

    /*
     * Sharing case implementation with scas[wdq] below.
     */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
11449
11450
11451/** Opcode 0xaf. */
11452FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
11453{
11454 IEMOP_HLP_NO_LOCK_PREFIX();
11455
11456 /*
11457 * Use the C implementation if a repeat prefix is encountered.
11458 */
11459 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPZ)
11460 {
11461 IEMOP_MNEMONIC("repe scas rAX,Xv");
11462 switch (pIemCpu->enmEffOpSize)
11463 {
11464 case IEMMODE_16BIT:
11465 switch (pIemCpu->enmEffAddrMode)
11466 {
11467 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
11468 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
11469 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
11470 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11471 }
11472 break;
11473 case IEMMODE_32BIT:
11474 switch (pIemCpu->enmEffAddrMode)
11475 {
11476 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
11477 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
11478 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
11479 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11480 }
11481 case IEMMODE_64BIT:
11482 switch (pIemCpu->enmEffAddrMode)
11483 {
11484 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
11485 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
11486 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
11487 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11488 }
11489 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11490 }
11491 }
11492 if (pIemCpu->fPrefixes & IEM_OP_PRF_REPNZ)
11493 {
11494 IEMOP_MNEMONIC("repne scas rAX,Xv");
11495 switch (pIemCpu->enmEffOpSize)
11496 {
11497 case IEMMODE_16BIT:
11498 switch (pIemCpu->enmEffAddrMode)
11499 {
11500 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
11501 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
11502 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
11503 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11504 }
11505 break;
11506 case IEMMODE_32BIT:
11507 switch (pIemCpu->enmEffAddrMode)
11508 {
11509 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
11510 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
11511 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
11512 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11513 }
11514 case IEMMODE_64BIT:
11515 switch (pIemCpu->enmEffAddrMode)
11516 {
11517 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
11518 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
11519 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
11520 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11521 }
11522 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11523 }
11524 }
11525 IEMOP_MNEMONIC("scas rAX,Xv");
11526
11527 /*
11528 * Annoying double switch here.
11529 * Using ugly macro for implementing the cases, sharing it with scasb.
11530 */
11531 switch (pIemCpu->enmEffOpSize)
11532 {
11533 case IEMMODE_16BIT:
11534 switch (pIemCpu->enmEffAddrMode)
11535 {
11536 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
11537 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
11538 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
11539 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11540 }
11541 break;
11542
11543 case IEMMODE_32BIT:
11544 switch (pIemCpu->enmEffAddrMode)
11545 {
11546 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
11547 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
11548 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
11549 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11550 }
11551 break;
11552
11553 case IEMMODE_64BIT:
11554 switch (pIemCpu->enmEffAddrMode)
11555 {
11556 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
11557 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
11558 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
11559 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11560 }
11561 break;
11562 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11563 }
11564 return VINF_SUCCESS;
11565}
11566
11567#undef IEM_SCAS_CASE
11568
/**
 * Common 'mov r8, imm8' helper.
 *
 * Fetches the imm8 operand and stores it into the given 8-bit register.
 *
 * @param   iReg    The 8-bit general register index (REX.B already merged in
 *                  by the caller; 4-7 mean AH/CH/DH/BH when no REX prefix is
 *                  present, SPL/BPL/SIL/DIL otherwise - IEM_MC_STORE_GREG_U8
 *                  handles that distinction).
 */
FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
    IEM_MC_STORE_GREG_U8(iReg, u8Value);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
11585
11586
/** Opcode 0xb0. */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    IEMOP_MNEMONIC("mov AL,Ib");
    /* REX.B turns this into R8B. */
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pIemCpu->uRexB);
}
11593
11594
/** Opcode 0xb1. */
FNIEMOP_DEF(iemOp_CL_Ib)
{
    IEMOP_MNEMONIC("mov CL,Ib");
    /* REX.B turns this into R9B. */
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pIemCpu->uRexB);
}
11601
11602
/** Opcode 0xb2. */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    IEMOP_MNEMONIC("mov DL,Ib");
    /* REX.B turns this into R10B. */
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pIemCpu->uRexB);
}
11609
11610
/** Opcode 0xb3. */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    IEMOP_MNEMONIC("mov BL,Ib");
    /* REX.B turns this into R11B. */
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pIemCpu->uRexB);
}
11617
11618
/** Opcode 0xb4. */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    IEMOP_MNEMONIC("mov AH,Ib");
    /* Register index 4 (xSP) means AH without a REX prefix, SPL/R12B with one;
       the 8-bit store helper resolves which.  xSP here is intentional. */
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pIemCpu->uRexB);
}
11625
11626
/** Opcode 0xb5. */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    IEMOP_MNEMONIC("mov CH,Ib");
    /* Register index 5 (xBP) means CH without a REX prefix, BPL/R13B with one. */
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pIemCpu->uRexB);
}
11633
11634
/** Opcode 0xb6. */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    IEMOP_MNEMONIC("mov DH,Ib");
    /* Register index 6 (xSI) means DH without a REX prefix, SIL/R14B with one. */
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pIemCpu->uRexB);
}
11641
11642
/** Opcode 0xb7. */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    IEMOP_MNEMONIC("mov BH,Ib");
    /* Register index 7 (xDI) means BH without a REX prefix, DIL/R15B with one. */
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pIemCpu->uRexB);
}
11649
11650
11651/**
11652 * Common 'mov regX,immX' helper.
11653 */
11654FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
11655{
11656 switch (pIemCpu->enmEffOpSize)
11657 {
11658 case IEMMODE_16BIT:
11659 {
11660 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
11661 IEMOP_HLP_NO_LOCK_PREFIX();
11662
11663 IEM_MC_BEGIN(0, 1);
11664 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
11665 IEM_MC_STORE_GREG_U16(iReg, u16Value);
11666 IEM_MC_ADVANCE_RIP();
11667 IEM_MC_END();
11668 break;
11669 }
11670
11671 case IEMMODE_32BIT:
11672 {
11673 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
11674 IEMOP_HLP_NO_LOCK_PREFIX();
11675
11676 IEM_MC_BEGIN(0, 1);
11677 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
11678 IEM_MC_STORE_GREG_U32(iReg, u32Value);
11679 IEM_MC_ADVANCE_RIP();
11680 IEM_MC_END();
11681 break;
11682 }
11683 case IEMMODE_64BIT:
11684 {
11685 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
11686 IEMOP_HLP_NO_LOCK_PREFIX();
11687
11688 IEM_MC_BEGIN(0, 1);
11689 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
11690 IEM_MC_STORE_GREG_U64(iReg, u64Value);
11691 IEM_MC_ADVANCE_RIP();
11692 IEM_MC_END();
11693 break;
11694 }
11695 }
11696
11697 return VINF_SUCCESS;
11698}
11699
11700
11701/** Opcode 0xb8. */
11702FNIEMOP_DEF(iemOp_eAX_Iv)
11703{
11704 IEMOP_MNEMONIC("mov rAX,IV");
11705 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pIemCpu->uRexB);
11706}
11707
11708
11709/** Opcode 0xb9. */
11710FNIEMOP_DEF(iemOp_eCX_Iv)
11711{
11712 IEMOP_MNEMONIC("mov rCX,IV");
11713 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pIemCpu->uRexB);
11714}
11715
11716
11717/** Opcode 0xba. */
11718FNIEMOP_DEF(iemOp_eDX_Iv)
11719{
11720 IEMOP_MNEMONIC("mov rDX,IV");
11721 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pIemCpu->uRexB);
11722}
11723
11724
11725/** Opcode 0xbb. */
11726FNIEMOP_DEF(iemOp_eBX_Iv)
11727{
11728 IEMOP_MNEMONIC("mov rBX,IV");
11729 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pIemCpu->uRexB);
11730}
11731
11732
11733/** Opcode 0xbc. */
11734FNIEMOP_DEF(iemOp_eSP_Iv)
11735{
11736 IEMOP_MNEMONIC("mov rSP,IV");
11737 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pIemCpu->uRexB);
11738}
11739
11740
11741/** Opcode 0xbd. */
11742FNIEMOP_DEF(iemOp_eBP_Iv)
11743{
11744 IEMOP_MNEMONIC("mov rBP,IV");
11745 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pIemCpu->uRexB);
11746}
11747
11748
11749/** Opcode 0xbe. */
11750FNIEMOP_DEF(iemOp_eSI_Iv)
11751{
11752 IEMOP_MNEMONIC("mov rSI,IV");
11753 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pIemCpu->uRexB);
11754}
11755
11756
11757/** Opcode 0xbf. */
11758FNIEMOP_DEF(iemOp_eDI_Iv)
11759{
11760 IEMOP_MNEMONIC("mov rDI,IV");
11761 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pIemCpu->uRexB);
11762}
11763
11764
/** Opcode 0xc0. */
/** Group 2 byte shifts/rotates with imm8 count: rol/ror/rcl/rcr/shl/shr/sar Eb,Ib.
 *  The /6 encoding is undefined and raises #UD. */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* The reg field of the ModR/M byte selects the operation. */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are undefined for some counts; skip them in verification mode. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* Note: 1 trailing opcode byte (the imm8) still follows the ModR/M bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_ASSIGN(cShiftArg, cShift);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
11823
11824
/** Opcode 0xc1. */
/** Group 2 word/dword/qword shifts/rotates with imm8 count: rol/ror/rcl/rcr/shl/shr/sar Ev,Ib.
 *  The /6 encoding is undefined and raises #UD. */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* The reg field of the ModR/M byte selects the operation. */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are undefined for some counts; skip them in verification mode. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes clear bits 63:32 of the full register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Note: 1 trailing opcode byte (the imm8) still follows the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11961
11962
/** Opcode 0xc2 - near return, popping Iw bytes of arguments off the stack. */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC("retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); /* byte count to add to [ER]SP after popping the return address */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near return defaults to a 64-bit operand size in long mode */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pIemCpu->enmEffOpSize, u16Imm);
}
11972
11973
/** Opcode 0xc3 - near return without argument popping (cbPop = 0). */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC("retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near return defaults to a 64-bit operand size in long mode */
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pIemCpu->enmEffOpSize, 0 /*cbPop*/);
}
11982
11983
/** Opcode 0xc4 - LES Gv,Mp in legacy/compatibility mode; doubles as the
 *  2-byte VEX prefix (not implemented yet, raises \#UD). */
FNIEMOP_DEF(iemOp_les_Gv_Mp_vex2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   pIemCpu->enmCpuMode == IEMMODE_64BIT
        || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC("2-byte-vex");
        /* The LES instruction is invalid 64-bit mode. In legacy and
           compatibility mode it is invalid with MOD=3.
           The use as a VEX prefix is made possible by assigning the inverted
           REX.R to the top MOD bit, and the top bit in the inverted register
           specifier to the bottom MOD bit, thereby effectively limiting 32-bit
           to accessing registers 0..7 in this VEX form. */
        /** @todo VEX: Just use new tables for it. */
        return IEMOP_RAISE_INVALID_OPCODE(); /* VEX decoding not implemented here yet */
    }
    IEMOP_MNEMONIC("les Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
12004
12005
/** Opcode 0xc5 - LDS Gv,Mp in legacy/compatibility mode; doubles as the
 *  3-byte VEX prefix (not implemented yet, raises \#UD after consuming the
 *  VEX bytes). */
FNIEMOP_DEF(iemOp_lds_Gv_Mp_vex3)
{
    /* The LDS instruction is invalid 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode. VEX is not available in real or v86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (pIemCpu->enmCpuMode != IEMMODE_64BIT)
    {
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            IEMOP_MNEMONIC("lds Gv,Mp");
            return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
        }
        IEMOP_HLP_NO_REAL_OR_V86_MODE(); /* MOD=3 means VEX, which requires protected mode */
    }

    IEMOP_MNEMONIC("3-byte-vex");
    /** @todo Test when exctly the VEX conformance checks kick in during
     * instruction decoding and fetching (using \#PF). */
    /* Consume the remaining VEX prefix bytes and the opcode byte so RIP
       advancing is consistent, even though we just raise #UD below. */
    uint8_t bVex1;   IEM_OPCODE_GET_NEXT_U8(&bVex1);
    uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
    uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
#if 0 /* will make sense of this next week... */
    if (   !(pIemCpu->fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX))
        &&
        )
    {

    }
#endif

    /** @todo VEX: Just use new tables for it. */
    return IEMOP_RAISE_INVALID_OPCODE();
}
12043
12044
/** Opcode 0xc6 - Group 11: mov Eb,Ib (/0 is the only defined encoding). */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC("mov Eb,Ib");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_BEGIN(0, 0);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = cbImm, an imm8 follows the ModRM bytes */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_STORE_MEM_U8(pIemCpu->iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12076
12077
/** Opcode 0xc7 - Group 11: mov Ev,Iz (/0 is the only defined encoding).
 *  The 64-bit variant takes a sign-extended imm32, matching the CPU. */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz (/0) in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC("mov Ev,Iz");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0);
                /* imm32 sign-extended to 64 bits, as the CPU does. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2 = cbImm, imm16 follows */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = cbImm, imm32 follows */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_STORE_MEM_U32(pIemCpu->iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* still 4 = cbImm, the 64-bit form uses imm32 */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_STORE_MEM_U64(pIemCpu->iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12158
12159
12160
12161
/** Opcode 0xc8 - ENTER Iw,Ib: set up a stack frame of Iw bytes with nesting
 *  level Ib. */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC("enter Iw,Ib");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_NO_LOCK_PREFIX();
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);       /* frame size */
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel); /* nesting level (masked by the CPU, see the CIMPL) */
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pIemCpu->enmEffOpSize, cbFrame, u8NestingLevel);
}
12172
12173
12174/** Opcode 0xc9. */
12175FNIEMOP_DEF(iemOp_leave)
12176{
12177 IEMOP_MNEMONIC("retn");
12178 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12179 IEMOP_HLP_NO_LOCK_PREFIX();
12180 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pIemCpu->enmEffOpSize);
12181}
12182
12183
/** Opcode 0xca - far return, popping Iw bytes of arguments off the stack. */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC("retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); /* byte count to add to the stack pointer after the far return */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pIemCpu->enmEffOpSize, u16Imm);
}
12193
12194
/** Opcode 0xcb - far return without argument popping (cbPop = 0). */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC("retf");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pIemCpu->enmEffOpSize, 0 /*cbPop*/);
}
12203
12204
/** Opcode 0xcc - INT3 breakpoint, raising \#BP.
 *  Uses the done-decoding variant of the lock check so a lock prefix faults
 *  only after the full instruction has been fetched. */
FNIEMOP_DEF(iemOp_int_3)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
}
12211
12212
/** Opcode 0xcd - INT Ib, software interrupt to vector Ib. */
FNIEMOP_DEF(iemOp_int_Ib)
{
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int); /* interrupt vector */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* lock prefix check after all opcode bytes are fetched */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
}
12220
12221
/** Opcode 0xce - INTO: raise \#OF if the overflow flag is set.
 *  Invalid in 64-bit mode (\#UD). */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC("into");
    IEMOP_HLP_NO_64BIT();

    /* The OF check is done by the CIMPL worker; this just forwards the vector. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG_CONST(uint8_t,   u8Int,      /*=*/ X86_XCPT_OF, 0);
    IEM_MC_ARG_CONST(bool,      fIsBpInstr, /*=*/ false,       1);
    IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
    IEM_MC_END();
    return VINF_SUCCESS;
}
12235
12236
/** Opcode 0xcf - IRET / IRETD / IRETQ depending on the effective operand size. */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC("iret");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pIemCpu->enmEffOpSize);
}
12244
12245
/** Opcode 0xd0 - Group 2: rotate/shift Eb by a constant count of 1.
 *  /6 is undefined (\#UD); the worker is selected via the reg field. */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF/AF may differ between real CPUs for some of these; don't compare them. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12301
12302
12303
/** Opcode 0xd1 - Group 2: rotate/shift Ev by a constant count of 1.
 *  /6 is undefined (\#UD); the size-specific worker comes from pImpl. */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF/AF may differ between real CPUs for some of these; don't compare them. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero the upper half in 64-bit mode */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12433
12434
/** Opcode 0xd2 - Group 2: rotate/shift Eb by CL.
 *  /6 is undefined (\#UD); the count is fetched from CL at runtime. */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Eb,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Eb,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Eb,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Eb,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Eb,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Eb,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Eb,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    /* OF/AF may differ between real CPUs for some of these; don't compare them. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst,    0);
        IEM_MC_ARG(uint8_t,     cShiftArg, 1);
        IEM_MC_ARG(uint32_t *,  pEFlags,   2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count = CL */
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count = CL */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
12492
12493
/** Opcode 0xd3 - Group 2: rotate/shift Ev by CL.
 *  /6 is undefined (\#UD); the count is fetched from CL at runtime and the
 *  size-specific worker comes from pImpl. */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC("rol Ev,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC("ror Ev,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC("rcl Ev,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC("rcr Ev,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC("shl Ev,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC("shr Ev,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC("sar Ev,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF/AF may differ between real CPUs for some of these; don't compare them. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_NO_LOCK_PREFIX();
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,  pu16Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count = CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,  pu32Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count = CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero the upper half in 64-bit mode */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,  pu64Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count = CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count = CL */
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count = CL */
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count = CL */
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12629
/** Opcode 0xd4 - AAM Ib. Invalid in 64-bit mode (\#UD); a zero immediate
 *  raises \#DE before deferring to the CIMPL worker. */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC("aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); /* the divisor; normally 0x0a */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* #UD takes precedence over #DE in 64-bit mode */
    if (!bImm)
        return IEMOP_RAISE_DIVIDE_ERROR();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
}
12641
12642
/** Opcode 0xd5 - AAD Ib. Invalid in 64-bit mode (\#UD). */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC("aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); /* the multiplier; normally 0x0a */
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
}
12652
12653
12654/** Opcode 0xd6. */
12655FNIEMOP_DEF(iemOp_salc)
12656{
12657 IEMOP_MNEMONIC("salc");
12658 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
12659 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12660 IEMOP_HLP_NO_64BIT();
12661
12662 IEM_MC_BEGIN(0, 0);
12663 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
12664 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
12665 } IEM_MC_ELSE() {
12666 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
12667 } IEM_MC_ENDIF();
12668 IEM_MC_ADVANCE_RIP();
12669 IEM_MC_END();
12670 return VINF_SUCCESS;
12671}
12672
12673
/** Opcode 0xd7 - XLAT: AL = [seg:(e/r)BX + zero-extended AL], with the
 *  address width selected by the effective address mode. */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC("xlat");
    IEMOP_HLP_NO_LOCK_PREFIX();
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX); /* index = AL, zero extended */
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX); /* + BX table base */
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pIemCpu->iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX); /* index = AL, zero extended */
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX); /* + EBX table base */
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pIemCpu->iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX); /* index = AL, zero extended */
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX); /* + RBX table base */
            IEM_MC_FETCH_MEM_U8(u8Tmp, pIemCpu->iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12720
12721
/**
 * Common worker for FPU instructions working on ST0 and STn, and storing the
 * result in ST0.
 *
 * Raises \#NM/\#MF first as appropriate; if either register is empty the
 * stack-underflow path is taken instead of calling the worker.
 *
 * @param   bRm         The ModRM byte; the low three bits select STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0); /* result goes to st0 */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0); /* underflow response targets st0 */
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12752
12753
/**
 * Common worker for FPU instructions working on ST0 and STn, and only affecting
 * flags.
 *
 * The worker produces an FSW value which is merged via IEM_MC_UPDATE_FSW; no
 * FPU register is written.
 *
 * @param   bRm         The ModRM byte; the low three bits select STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t,          u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,        pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,        pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX); /* UINT8_MAX: no destination register - presumably only FSW is touched */
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12784
12785
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags, and popping when done.
 *
 * Identical to iemOpHlpFpuNoStore_st0_stN except that the FSW update and the
 * underflow path both pop the register stack.
 *
 * @param   bRm         The ModRM byte; the low three bits select STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t,          u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,        pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,        pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX); /* UINT8_MAX: no destination register */
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12816
12817
/** Opcode 0xd8 11/0 - fadd st0,stN: st0 += stN. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
12824
12825
/** Opcode 0xd8 11/1 - fmul st0,stN: st0 *= stN. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
12832
12833
/** Opcode 0xd8 11/2 - fcom st0,stN: compare st0 with stN, setting FSW only. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
12840
12841
/** Opcode 0xd8 11/3 - fcomp st0,stN: same compare worker as fcom, but pops
 *  the stack afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
12848
12849
/** Opcode 0xd8 11/4 - fsub st0,stN: st0 -= stN. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
12856
12857
/** Opcode 0xd8 11/5 - fsubr st0,stN: reversed subtract, result in st0. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
12864
12865
/** Opcode 0xd8 11/6 - fdiv st0,stN: st0 /= stN. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
12872
12873
/** Opcode 0xd8 11/7 - fdivr st0,stN: reversed divide, result in st0. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
12880
12881
/**
 * Common worker for FPU instructions working on ST0 and an m32r, and storing
 * the result in ST0.
 *
 * The 32-bit real operand is fetched from memory after the effective address
 * is calculated and decoding completes; \#NM/\#MF checks precede the fetch.
 *
 * @param   bRm         The ModRM byte, encoding the memory operand.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,   r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0); /* result goes to st0 */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0); /* underflow response targets st0 */
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12917
12918
/** Opcode 0xd8 !11/0 — FADD ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd st0,m32r");
    /* Dispatch to the common ST(0)-by-m32real worker with the fadd helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
12925
12926
/** Opcode 0xd8 !11/1 — FMUL ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul st0,m32r");
    /* Dispatch to the common ST(0)-by-m32real worker with the fmul helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
12933
12934
/** Opcode 0xd8 !11/2 — FCOM ST(0),m32real.
 *
 * Compares ST0 against a 32-bit real from memory.  Only FSW is updated (plus
 * FPUDP via the _MEM_OP macro variants); no stack register is written. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* UINT8_MAX: no destination stack register is involved in the underflow. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
12967
12968
/** Opcode 0xd8 !11/3 — FCOMP ST(0),m32real.
 *
 * Identical to iemOp_fcom_m32r except that the _THEN_POP macro variants pop
 * the register stack after updating FSW. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* UINT8_MAX: no destination stack register is involved in the underflow. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13001
13002
/** Opcode 0xd8 !11/4 — FSUB ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub st0,m32r");
    /* Dispatch to the common ST(0)-by-m32real worker with the fsub helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
13009
13010
/** Opcode 0xd8 !11/5 — FSUBR ST(0),m32real (reversed-operand subtract). */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr st0,m32r");
    /* Dispatch to the common ST(0)-by-m32real worker with the fsubr helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
13017
13018
/** Opcode 0xd8 !11/6 — FDIV ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv st0,m32r");
    /* Dispatch to the common ST(0)-by-m32real worker with the fdiv helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
13025
13026
/** Opcode 0xd8 !11/7 — FDIVR ST(0),m32real (reversed-operand divide). */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr st0,m32r");
    /* Dispatch to the common ST(0)-by-m32real worker with the fdivr helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
13033
13034
/** Opcode 0xd8 — x87 escape group 0.
 *
 * Splits on the ModR/M mod field: register form (mod == 3) operates on
 * ST(0),ST(i); memory form operates on ST(0),m32real.  The /reg field selects
 * the operation in both cases. */
FNIEMOP_DEF(iemOp_EscF0)
{
    /* Remember where the escape opcode starts so FOP/FPUIP can be updated. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form: ST(0) op ST(i). */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form: ST(0) op m32real. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13072
13073
/** Opcode 0xd9 /0 mem32real — FLD m32real: convert to 80-bit and push.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* A push lands in the current ST(7) slot, so that one must be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13106
13107
/** Opcode 0xd9 !11/2 mem32real — FST m32real: store ST0 as 32-bit real. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front; the commit depends on FSW. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: with IM masked, write a negative QNaN; either way flag underflow. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13142
13143
/** Opcode 0xd9 !11/3 — FSTP m32real: store ST0 as 32-bit real and pop.
 * Same as iemOp_fst_m32r but using the _THEN_POP macro variants. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST0: with IM masked, write a negative QNaN; either way flag underflow. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13178
13179
/** Opcode 0xd9 !11/4 — FLDENV m14/28byte: load the FPU environment.
 * The heavy lifting (14 vs 28 byte layout per operand size) is done by the
 * C implementation iemCImpl_fldenv. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
13196
13197
13198/** Opcode 0xd9 !11/5 */
13199FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
13200{
13201 IEMOP_MNEMONIC("fldcw m2byte");
13202 IEM_MC_BEGIN(1, 1);
13203 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13204 IEM_MC_ARG(uint16_t, u16Fsw, 0);
13205 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13206 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13207 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13208 IEM_MC_FETCH_MEM_U16(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
13209 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
13210 IEM_MC_END();
13211 return VINF_SUCCESS;
13212}
13213
13214
/** Opcode 0xd9 !11/6 — FNSTENV m14/m28byte: store the FPU environment
 * (no-wait form; the FWAIT-prefixed FSTENV decodes to the same handler).
 * NOTE(review): the mnemonic string says "fstenv" while the handler is the
 * no-wait fnstenv — consider aligning the string. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
13231
13232
/** Opcode 0xd9 !11/7 — FNSTCW m2byte: store the FPU control word to memory. */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
13249
13250
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++? — FNOP: x87 no-operation.
 * Still raises \#NM/\#MF as appropriate and updates the FPU opcode/IP. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC("fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
13268
13269
/** Opcode 0xd9 11/0 stN — FLD ST(i): push a copy of ST(i) onto the stack. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Source register index comes from the r/m bits. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
13295
13296
/** Opcode 0xd9 11/3 stN — FXCH ST(i): exchange ST(0) and ST(i). */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Both registers valid: swap via FpuRes (C1 set), storing old ST0 into ST(i). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        /* Underflow handling is complex enough to live in a C implementation. */
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
13325
13326
/** Opcode 0xd9 11/4, 0xdd 11/2 — FSTP ST(i): copy ST(0) to ST(i) and pop. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
    if (!iDstReg)
    {
        /* ST(0) -> ST(0): no copy needed, just pop (or record underflow). */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();
        IEM_MC_USED_FPU();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* General case: store ST(0) into ST(i), then pop. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();
        IEM_MC_USED_FPU();
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
13369
13370
13371/**
13372 * Common worker for FPU instructions working on ST0 and replaces it with the
13373 * result, i.e. unary operators.
13374 *
13375 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13376 */
13377FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
13378{
13379 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13380
13381 IEM_MC_BEGIN(2, 1);
13382 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13383 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13384 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
13385
13386 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13387 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13388 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
13389 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
13390 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
13391 IEM_MC_ELSE()
13392 IEM_MC_FPU_STACK_UNDERFLOW(0);
13393 IEM_MC_ENDIF();
13394 IEM_MC_USED_FPU();
13395 IEM_MC_ADVANCE_RIP();
13396
13397 IEM_MC_END();
13398 return VINF_SUCCESS;
13399}
13400
13401
/** Opcode 0xd9 0xe0 — FCHS: change the sign of ST0. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC("fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
13408
13409
/** Opcode 0xd9 0xe1 — FABS: absolute value of ST0. */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC("fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
13416
13417
13418/**
13419 * Common worker for FPU instructions working on ST0 and only returns FSW.
13420 *
13421 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13422 */
13423FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
13424{
13425 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13426
13427 IEM_MC_BEGIN(2, 1);
13428 IEM_MC_LOCAL(uint16_t, u16Fsw);
13429 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13430 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
13431
13432 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13433 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13434 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
13435 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
13436 IEM_MC_UPDATE_FSW(u16Fsw);
13437 IEM_MC_ELSE()
13438 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
13439 IEM_MC_ENDIF();
13440 IEM_MC_USED_FPU();
13441 IEM_MC_ADVANCE_RIP();
13442
13443 IEM_MC_END();
13444 return VINF_SUCCESS;
13445}
13446
13447
/** Opcode 0xd9 0xe4 — FTST: compare ST0 against 0.0, FSW only. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC("ftst st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
}
13454
13455
/** Opcode 0xd9 0xe5 — FXAM: classify ST0 into the FSW condition bits. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC("fxam st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
}
13462
13463
13464/**
13465 * Common worker for FPU instructions pushing a constant onto the FPU stack.
13466 *
13467 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13468 */
13469FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
13470{
13471 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13472
13473 IEM_MC_BEGIN(1, 1);
13474 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13475 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13476
13477 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13478 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13479 IEM_MC_IF_FPUREG_IS_EMPTY(7)
13480 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
13481 IEM_MC_PUSH_FPU_RESULT(FpuRes);
13482 IEM_MC_ELSE()
13483 IEM_MC_FPU_STACK_PUSH_OVERFLOW();
13484 IEM_MC_ENDIF();
13485 IEM_MC_USED_FPU();
13486 IEM_MC_ADVANCE_RIP();
13487
13488 IEM_MC_END();
13489 return VINF_SUCCESS;
13490}
13491
13492
/** Opcode 0xd9 0xe8 — FLD1: push +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC("fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
13499
13500
/** Opcode 0xd9 0xe9 — FLDL2T: push log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC("fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
13507
13508
/** Opcode 0xd9 0xea — FLDL2E: push log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC("fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
13515
/** Opcode 0xd9 0xeb — FLDPI: push pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC("fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
13522
13523
/** Opcode 0xd9 0xec — FLDLG2: push log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC("fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
13530
/** Opcode 0xd9 0xed — FLDLN2: push ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC("fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
13537
13538
/** Opcode 0xd9 0xee — FLDZ: push +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC("fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
13545
13546
/** Opcode 0xd9 0xf0 — F2XM1: unary op replacing ST0. */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC("f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
13553
13554
13555/** Opcode 0xd9 0xf1. */
13556FNIEMOP_DEF(iemOp_fylx2)
13557{
13558 IEMOP_MNEMONIC("fylx2 st0");
13559 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fyl2x_r80);
13560}
13561
13562
13563/**
13564 * Common worker for FPU instructions working on ST0 and having two outputs, one
13565 * replacing ST0 and one pushed onto the stack.
13566 *
13567 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13568 */
13569FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
13570{
13571 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13572
13573 IEM_MC_BEGIN(2, 1);
13574 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
13575 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
13576 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
13577
13578 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13579 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13580 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
13581 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
13582 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
13583 IEM_MC_ELSE()
13584 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
13585 IEM_MC_ENDIF();
13586 IEM_MC_USED_FPU();
13587 IEM_MC_ADVANCE_RIP();
13588
13589 IEM_MC_END();
13590 return VINF_SUCCESS;
13591}
13592
13593
/** Opcode 0xd9 0xf2 — FPTAN: replaces ST0 and pushes a second result. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC("fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
13600
13601
13602/**
13603 * Common worker for FPU instructions working on STn and ST0, storing the result
13604 * in STn, and popping the stack unless IE, DE or ZE was raised.
13605 *
13606 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13607 */
13608FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
13609{
13610 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13611
13612 IEM_MC_BEGIN(3, 1);
13613 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
13614 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
13615 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13616 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
13617
13618 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13619 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13620
13621 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
13622 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
13623 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
13624 IEM_MC_ELSE()
13625 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
13626 IEM_MC_ENDIF();
13627 IEM_MC_USED_FPU();
13628 IEM_MC_ADVANCE_RIP();
13629
13630 IEM_MC_END();
13631 return VINF_SUCCESS;
13632}
13633
13634
/** Opcode 0xd9 0xf3 — FPATAN ST(1),ST(0): result to ST(1), then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC("fpatan st1,st0");
    /* The constant 1 selects ST(1) as the destination register. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
13641
13642
/** Opcode 0xd9 0xf4 — FXTRACT: replaces ST0 and pushes a second result. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC("fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
13649
13650
/** Opcode 0xd9 0xf5 — FPREM1 ST(0),ST(1): IEEE partial remainder into ST0. */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC("fprem1 st0, st1");
    /* Uses the ST0-by-ST(i) worker with i = 1; no pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
13657
13658
/** Opcode 0xd9 0xf6 — FDECSTP: decrement the FPU stack TOP pointer. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC("fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0); /* clears the C flags as per the note above */

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
13681
13682
/** Opcode 0xd9 0xf7 — FINCSTP: increment the FPU stack TOP pointer. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC("fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0); /* clears the C flags as per the note above */

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
13705
13706
/** Opcode 0xd9 0xf8 — FPREM ST(0),ST(1): partial remainder into ST0. */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC("fprem st0, st1");
    /* Uses the ST0-by-ST(i) worker with i = 1; no pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
13713
13714
/** Opcode 0xd9 0xf9 — FYL2XP1 ST(1),ST(0): result to ST(1), then pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC("fyl2xp1 st1,st0");
    /* The constant 1 selects ST(1) as the destination register. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
13721
13722
/** Opcode 0xd9 0xfa — FSQRT: unary op replacing ST0. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC("fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
13729
13730
/** Opcode 0xd9 0xfb — FSINCOS: replaces ST0 and pushes a second result. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC("fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
13737
13738
/** Opcode 0xd9 0xfc — FRNDINT: unary op replacing ST0. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC("frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
13745
13746
/** Opcode 0xd9 0xfd — FSCALE ST(0),ST(1): result into ST0. */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC("fscale st0, st1");
    /* Uses the ST0-by-ST(i) worker with i = 1; no pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
13753
13754
/** Opcode 0xd9 0xfe — FSIN: unary op replacing ST0. */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC("fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
13761
13762
/** Opcode 0xd9 0xff — FCOS: unary op replacing ST0. */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC("fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
13769
13770
/** Used by iemOp_EscF1.
 * Dispatch table for the 0xd9 register-form bytes 0xe0..0xff; indexed by
 * (bRm - 0xe0).  Entries for undefined encodings point to iemOp_Invalid. */
static const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */  iemOp_fchs,
    /* 0xe1 */  iemOp_fabs,
    /* 0xe2 */  iemOp_Invalid,
    /* 0xe3 */  iemOp_Invalid,
    /* 0xe4 */  iemOp_ftst,
    /* 0xe5 */  iemOp_fxam,
    /* 0xe6 */  iemOp_Invalid,
    /* 0xe7 */  iemOp_Invalid,
    /* 0xe8 */  iemOp_fld1,
    /* 0xe9 */  iemOp_fldl2t,
    /* 0xea */  iemOp_fldl2e,
    /* 0xeb */  iemOp_fldpi,
    /* 0xec */  iemOp_fldlg2,
    /* 0xed */  iemOp_fldln2,
    /* 0xee */  iemOp_fldz,
    /* 0xef */  iemOp_Invalid,
    /* 0xf0 */  iemOp_f2xm1,
    /* 0xf1 */  iemOp_fylx2,
    /* 0xf2 */  iemOp_fptan,
    /* 0xf3 */  iemOp_fpatan,
    /* 0xf4 */  iemOp_fxtract,
    /* 0xf5 */  iemOp_fprem1,
    /* 0xf6 */  iemOp_fdecstp,
    /* 0xf7 */  iemOp_fincstp,
    /* 0xf8 */  iemOp_fprem,
    /* 0xf9 */  iemOp_fyl2xp1,
    /* 0xfa */  iemOp_fsqrt,
    /* 0xfb */  iemOp_fsincos,
    /* 0xfc */  iemOp_frndint,
    /* 0xfd */  iemOp_fscale,
    /* 0xfe */  iemOp_fsin,
    /* 0xff */  iemOp_fcos
};
13807
13808
/** Opcode 0xd9 — x87 escape group 1.
 *
 * Register form dispatches on /reg (and via g_apfnEscF1_E0toFF for 0xe0..0xff);
 * memory form handles fld/fst/fstp m32real plus the environment/control-word
 * instructions. */
FNIEMOP_DEF(iemOp_EscF1)
{
    /* Remember where the escape opcode starts so FOP/FPUIP can be updated. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                /* Only 0xd0 (FNOP) is defined in this row. */
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                /* /reg 4..7 covers bytes 0xe0..0xff: use the dispatch table. */
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r,  bRm);
            case 1: return IEMOP_RAISE_INVALID_OPCODE();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv,    bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw,     bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv,   bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw,    bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13850
13851
/** Opcode 0xda 11/0 — FCMOVB ST(0),ST(i): move if CF is set (below). */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(i) and ST0 must be valid; the move itself is conditional on CF. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13878
13879
/** Opcode 0xda 11/1 — FCMOVE ST(0),ST(i): move if ZF is set (equal). */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(i) and ST0 must be valid; the move itself is conditional on ZF. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13906
13907
/** Opcode 0xda 11/2. FCMOVBE ST(0),ST(i) - copy ST(i) to ST(0) if CF=1 or ZF=1 (below or equal). */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(0) and ST(i) must be non-empty; otherwise signal stack underflow on ST(0). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FOP/FIP are updated even when the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13934
13935
/** Opcode 0xda 11/3. FCMOVU ST(0),ST(i) - copy ST(i) to ST(0) if PF=1 (unordered). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(0) and ST(i) must be non-empty; otherwise signal stack underflow on ST(0). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FOP/FIP are updated even when the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
13962
13963
13964/**
13965 * Common worker for FPU instructions working on ST0 and STn, only affecting
13966 * flags, and popping twice when done.
13967 *
13968 * @param pfnAImpl Pointer to the instruction implementation (assembly).
13969 */
13970FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
13971{
13972 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13973
13974 IEM_MC_BEGIN(3, 1);
13975 IEM_MC_LOCAL(uint16_t, u16Fsw);
13976 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
13977 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
13978 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
13979
13980 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
13981 IEM_MC_MAYBE_RAISE_FPU_XCPT();
13982 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
13983 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
13984 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
13985 IEM_MC_ELSE()
13986 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
13987 IEM_MC_ENDIF();
13988 IEM_MC_USED_FPU();
13989 IEM_MC_ADVANCE_RIP();
13990
13991 IEM_MC_END();
13992 return VINF_SUCCESS;
13993}
13994
13995
/** Opcode 0xda 0xe9. FUCOMPP - unordered compare ST(0) with ST(1), pop twice. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC("fucompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}
14002
14003
14004/**
14005 * Common worker for FPU instructions working on ST0 and an m32i, and storing
14006 * the result in ST0.
14007 *
14008 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14009 */
14010FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
14011{
14012 IEM_MC_BEGIN(3, 3);
14013 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14014 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14015 IEM_MC_LOCAL(int32_t, i32Val2);
14016 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14017 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14018 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
14019
14020 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14021 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14022
14023 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14024 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14025 IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);
14026
14027 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
14028 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
14029 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
14030 IEM_MC_ELSE()
14031 IEM_MC_FPU_STACK_UNDERFLOW(0);
14032 IEM_MC_ENDIF();
14033 IEM_MC_USED_FPU();
14034 IEM_MC_ADVANCE_RIP();
14035
14036 IEM_MC_END();
14037 return VINF_SUCCESS;
14038}
14039
14040
/** Opcode 0xda !11/0. FIADD m32int - ST(0) += 32-bit signed integer operand. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
14047
14048
/** Opcode 0xda !11/1. FIMUL m32int - ST(0) *= 32-bit signed integer operand. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
14055
14056
/** Opcode 0xda !11/2. FICOM m32int - compare ST(0) with a 32-bit signed integer, set C0/C2/C3. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Only the status word is updated; no register write-back, no pop. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14089
14090
/** Opcode 0xda !11/3. FICOMP m32int - like FICOM m32int but pops ST(0) afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Same worker as FICOM; the _THEN_POP variants pop the stack afterwards. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14123
14124
/** Opcode 0xda !11/4. FISUB m32int - ST(0) -= 32-bit signed integer operand. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
14131
14132
/** Opcode 0xda !11/5. FISUBR m32int - ST(0) = integer operand - ST(0) (reversed subtract). */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
14139
14140
/** Opcode 0xda !11/6. FIDIV m32int - ST(0) /= 32-bit signed integer operand. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
14147
14148
/** Opcode 0xda !11/7. FIDIVR m32int - ST(0) = integer operand / ST(0) (reversed divide). */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
14155
14156
/**
 * Opcode 0xda - FPU escape group 2.
 *
 * Dispatches on the ModR/M byte: register form (mod==3) handles the
 * FCMOVB/FCMOVE/FCMOVBE/FCMOVU family plus FUCOMPP (0xe9); memory form
 * handles the 32-bit integer arithmetic/compare instructions.
 */
FNIEMOP_DEF(iemOp_EscF2)
{
    /* Remember where the FPU opcode starts so FOP can be updated later. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form: reg field selects the sub-instruction. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5:
                /* Only 0xe9 (FUCOMPP) is defined in this row. */
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form: 32-bit integer operand instructions. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14196
14197
/** Opcode 0xdb !11/0. FILD m32int - convert a 32-bit signed integer to R80 and push it. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Pushing requires ST(7) to be free, otherwise it's a stack (push) overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14229
14230
/** Opcode 0xdb !11/1. FISTTP m32int - store ST(0) as a 32-bit integer with truncation, then pop (SSE3). */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* The destination is mapped for writing; the commit depends on the resulting FSW. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14265
14266
/** Opcode 0xdb !11/2. FIST m32int - store ST(0) as a 32-bit integer (rounded per FCW.RC), no pop. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* The destination is mapped for writing; the commit depends on the resulting FSW. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14301
14302
14303/** Opcode 0xdb !11/3. */
14304FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
14305{
14306 IEMOP_MNEMONIC("fisttp m32i");
14307 IEM_MC_BEGIN(3, 2);
14308 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14309 IEM_MC_LOCAL(uint16_t, u16Fsw);
14310 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
14311 IEM_MC_ARG(int32_t *, pi32Dst, 1);
14312 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
14313
14314 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
14315 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14316 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14317 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14318
14319 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
14320 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
14321 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
14322 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
14323 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
14324 IEM_MC_ELSE()
14325 IEM_MC_IF_FCW_IM()
14326 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
14327 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
14328 IEM_MC_ENDIF();
14329 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
14330 IEM_MC_ENDIF();
14331 IEM_MC_USED_FPU();
14332 IEM_MC_ADVANCE_RIP();
14333
14334 IEM_MC_END();
14335 return VINF_SUCCESS;
14336}
14337
14338
/** Opcode 0xdb !11/5. FLD m80real - load an 80-bit real from memory and push it. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Pushing requires ST(7) to be free, otherwise it's a stack (push) overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14370
14371
/** Opcode 0xdb !11/7. FSTP m80real - store ST(0) as an 80-bit real to memory, then pop. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* The destination is mapped for writing; the commit depends on the resulting FSW. */
    IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store the QNaN real indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14406
14407
/** Opcode 0xdb 11/0. FCMOVNB ST(0),ST(i) - copy ST(i) to ST(0) if CF=0 (not below). */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(0) and ST(i) must be non-empty; otherwise signal stack underflow on ST(0). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FOP/FIP are updated even when the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14434
14435
/** Opcode 0xdb 11/1. FCMOVNE ST(0),ST(i) - copy ST(i) to ST(0) if ZF=0 (not equal). */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(0) and ST(i) must be non-empty; otherwise signal stack underflow on ST(0). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FOP/FIP are updated even when the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14462
14463
/** Opcode 0xdb 11/2. FCMOVNBE ST(0),ST(i) - copy ST(i) to ST(0) if CF=0 and ZF=0 (not below or equal). */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(0) and ST(i) must be non-empty; otherwise signal stack underflow on ST(0). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FOP/FIP are updated even when the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14490
14491
14492/** Opcode 0xdb 11/3. */
14493FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
14494{
14495 IEMOP_MNEMONIC("fcmovnnu st0,stN");
14496 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14497
14498 IEM_MC_BEGIN(0, 1);
14499 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
14500
14501 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14502 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14503
14504 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
14505 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
14506 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
14507 IEM_MC_ENDIF();
14508 IEM_MC_UPDATE_FPU_OPCODE_IP();
14509 IEM_MC_ELSE()
14510 IEM_MC_FPU_STACK_UNDERFLOW(0);
14511 IEM_MC_ENDIF();
14512 IEM_MC_USED_FPU();
14513 IEM_MC_ADVANCE_RIP();
14514
14515 IEM_MC_END();
14516 return VINF_SUCCESS;
14517}
14518
14519
/** Opcode 0xdb 0xe0. FNENI - 8087 enable-interrupts instruction; a no-op on later FPUs. */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC("fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    /* Still subject to CR0.EM/TS checks, otherwise ignored. */
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14531
14532
/** Opcode 0xdb 0xe1. FNDISI - 8087 disable-interrupts instruction; a no-op on later FPUs. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC("fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    /* Still subject to CR0.EM/TS checks, otherwise ignored. */
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14544
14545
/** Opcode 0xdb 0xe2. FNCLEX - clear the FPU exception flags (and busy flag) without checking for pending exceptions. */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC("fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14559
14560
/** Opcode 0xdb 0xe3. FNINIT - initialize the FPU without checking for pending exceptions. */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC("fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Shares the C implementation with FINIT; fCheckXcpts=false selects the FN variant. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
14568
14569
/** Opcode 0xdb 0xe4. FNSETPM - 80287 set-protected-mode instruction; a no-op on later FPUs. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC("fnsetpm (80287/ign)"); /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    /* Still subject to CR0.EM/TS checks, otherwise ignored. */
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
14581
14582
/** Opcode 0xdb 0xe5. FRSTPM - 80287XL reset-protected-mode instruction; raises \#UD here like modern CPUs do. */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC("frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
14598
14599
/** Opcode 0xdb 11/5. FUCOMI ST(0),ST(i) - unordered compare setting EFLAGS, no pop. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}
14606
14607
/** Opcode 0xdb 11/6. FCOMI ST(0),ST(i) - ordered compare setting EFLAGS, no pop. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
14614
14615
/**
 * Opcode 0xdb - FPU escape group 3.
 *
 * Dispatches on the ModR/M byte: register form (mod==3) covers the
 * FCMOVNB/FCMOVNE/FCMOVNBE/FCMOVNU family, the 0xe0-0xe5 control
 * instructions and FUCOMI/FCOMI; memory form covers the 32-bit integer
 * load/store instructions and the 80-bit real load/store.
 */
FNIEMOP_DEF(iemOp_EscF3)
{
    /* Remember where the FPU opcode starts so FOP can be updated later. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form: reg field selects the sub-instruction. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                /* 0xe0-0xe7: FPU control instructions (several legacy no-ops). */
                switch (bRm)
                {
                    case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form: integer and 80-bit real load/store instructions. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14665
14666
14667/**
14668 * Common worker for FPU instructions working on STn and ST0, and storing the
14669 * result in STn unless IE, DE or ZE was raised.
14670 *
14671 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14672 */
14673FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
14674{
14675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14676
14677 IEM_MC_BEGIN(3, 1);
14678 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14679 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14680 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
14681 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
14682
14683 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14684 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14685
14686 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
14687 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
14688 IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
14689 IEM_MC_ELSE()
14690 IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
14691 IEM_MC_ENDIF();
14692 IEM_MC_USED_FPU();
14693 IEM_MC_ADVANCE_RIP();
14694
14695 IEM_MC_END();
14696 return VINF_SUCCESS;
14697}
14698
14699
/** Opcode 0xdc 11/0. FADD ST(i),ST(0) - ST(i) += ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
14706
14707
/** Opcode 0xdc 11/1. FMUL ST(i),ST(0) - ST(i) *= ST(0). */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
14714
14715
/** Opcode 0xdc 11/4. FSUBR ST(i),ST(0) - ST(i) = ST(0) - ST(i). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
14722
14723
/** Opcode 0xdc 11/5. FSUB ST(i),ST(0) - ST(i) -= ST(0). */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
14730
14731
/** Opcode 0xdc 11/6. FDIVR ST(i),ST(0) - ST(i) = ST(0) / ST(i). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
14738
14739
/** Opcode 0xdc 11/7. FDIV ST(i),ST(0) - ST(i) /= ST(0). */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
14746
14747
14748/**
14749 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
14750 * memory operand, and storing the result in ST0.
14751 *
14752 * @param pfnAImpl Pointer to the instruction implementation (assembly).
14753 */
14754FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
14755{
14756 IEM_MC_BEGIN(3, 3);
14757 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14758 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
14759 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
14760 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
14761 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
14762 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
14763
14764 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14765 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14766 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
14767 IEM_MC_MAYBE_RAISE_FPU_XCPT();
14768
14769 IEM_MC_FETCH_MEM_R64(r64Factor2, pIemCpu->iEffSeg, GCPtrEffSrc);
14770 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
14771 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
14772 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pIemCpu->iEffSeg, GCPtrEffSrc);
14773 IEM_MC_ELSE()
14774 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pIemCpu->iEffSeg, GCPtrEffSrc);
14775 IEM_MC_ENDIF();
14776 IEM_MC_USED_FPU();
14777 IEM_MC_ADVANCE_RIP();
14778
14779 IEM_MC_END();
14780 return VINF_SUCCESS;
14781}
14782
14783
/** Opcode 0xdc !11/0. FADD m64real - ST(0) += 64-bit real memory operand. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
14790
14791
/** Opcode 0xdc !11/1. FMUL m64real - ST(0) *= 64-bit real memory operand. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
14798
14799
/** Opcode 0xdc !11/2. FCOM m64real - compare ST(0) with a 64-bit real, set C0/C2/C3. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Only the status word is updated; no register write-back, no pop. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14832
14833
/** Opcode 0xdc !11/3. FCOMP m64real - like FCOM m64real but pops ST(0) afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Same worker as FCOM; the _THEN_POP variants pop the stack afterwards. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14866
14867
/** Opcode 0xdc !11/4.
 * FSUB ST(0),m64r - defers to the common ST0-op-m64r worker. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
14874
14875
/** Opcode 0xdc !11/5.
 * FSUBR ST(0),m64r - defers to the common ST0-op-m64r worker. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
14882
14883
/** Opcode 0xdc !11/6.
 * FDIV ST(0),m64r - defers to the common ST0-op-m64r worker. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
14890
14891
/** Opcode 0xdc !11/7.
 * FDIVR ST(0),m64r - defers to the common ST0-op-m64r worker. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
14898
14899
/** Opcode 0xdc.
 * Escape group dispatcher: register forms (mod=3) operate on ST(i) with ST(0),
 * memory forms operate on ST(0) with an m64 real (or reserved encodings). */
FNIEMOP_DEF(iemOp_EscF4)
{
    /* Remember where the 0xdc escape byte is so FOP can be updated. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            /* Note: for 0xdc the reg forms of sub/div are the reversed variants
               (FSUBR/FSUB, FDIVR/FDIV swapped relative to 0xd8) - architectural. */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14936
14937
/** Opcode 0xdd !11/0.
 * FLD m64r - convert a 64-bit real from memory to 80-bit and push it.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pIemCpu->iEffSeg, GCPtrEffSrc);
    /* ST(7) (the register below the stack top) must be free for the push. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
14969
14970
/** Opcode 0xdd !11/1.
 * FISTTP m64i - store ST(0) as a 64-bit integer with truncation, then pop.
 * (Original comment said !11/0; the EscF5 table dispatches this for reg=1.) */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing; commit only happens below. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15005
15006
/** Opcode 0xdd !11/2.
 * FST m64r - store ST(0) to memory as a 64-bit real (no pop).
 * (Original comment said !11/0; the EscF5 table dispatches this for reg=2.) */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store negative QNaN (real indefinite). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15041
15042
15043
15044
/** Opcode 0xdd !11/3.
 * FSTP m64r - like FST m64r above but pops ST(0) afterwards.
 * (Original comment said !11/0; the EscF5 table dispatches this for reg=3.) */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store negative QNaN (real indefinite). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15079
15080
/** Opcode 0xdd !11/4.
 * FRSTOR m94/108byte - defers to a C implementation; the image size depends on
 * the effective operand size, hence enmEffOpSize is passed along.
 * (Original comment said !11/0; the EscF5 table dispatches this for reg=4.) */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC("frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
15097
15098
/** Opcode 0xdd !11/6.
 * FNSAVE m94/108byte - defers to a C implementation, passing the effective
 * operand size which determines the saved image layout.
 * (Original comment said !11/0; the EscF5 table dispatches this for reg=6.) */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pIemCpu->enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ASSIGN(iEffSeg, pIemCpu->iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;

}
15116
/** Opcode 0xdd !11/7.
 * FNSTSW m16 - store the FPU status word to memory; no FPU exception check
 * (non-waiting form).
 * (Original comment said !11/0; the EscF5 table dispatches this for reg=7.) */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pIemCpu->iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}
15140
15141
/** Opcode 0xdd 11/0.
 * FFREE ST(i) - tag the given register as empty without touching TOP. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
       unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15163
15164
/** Opcode 0xdd 11/2.
 * FST ST(i) - copy ST(0) to ST(i); FSW C-flags are cleared via the zero FSW.
 * (Original comment said 11/1; the EscF5 table dispatches this for reg=2.) */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15187
15188
15189/** Opcode 0xdd 11/3. */
15190FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
15191{
15192 IEMOP_MNEMONIC("fcom st0,stN");
15193 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
15194}
15195
15196
15197/** Opcode 0xdd 11/4. */
15198FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
15199{
15200 IEMOP_MNEMONIC("fcomp st0,stN");
15201 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
15202}
15203
15204
/** Opcode 0xdd.
 * Escape group dispatcher: register forms are FFREE/FST/FSTP/FUCOM(P),
 * memory forms are 64-bit real loads/stores plus FRSTOR/FNSAVE/FNSTSW. */
FNIEMOP_DEF(iemOp_EscF5)
{
    /* Remember where the 0xdd escape byte is so FOP can be updated. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
            case 5: return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15241
15242
/** Opcode 0xde 11/0.
 * FADDP ST(i),ST(0) - add and pop, via the common stN-op-st0-pop worker. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
15249
15250
/** Opcode 0xde 11/1.
 * FMULP ST(i),ST(0) - multiply and pop.
 * (Original comment said 11/0; the EscF6 table dispatches this for reg=1.) */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
15257
15258
15259/** Opcode 0xde 0xd9. */
15260FNIEMOP_DEF(iemOp_fcompp)
15261{
15262 IEMOP_MNEMONIC("fucompp st0,stN");
15263 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
15264}
15265
15266
/** Opcode 0xde 11/4.
 * FSUBRP ST(i),ST(0) - reversed subtract and pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
15273
15274
/** Opcode 0xde 11/5.
 * FSUBP ST(i),ST(0) - subtract and pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
15281
15282
/** Opcode 0xde 11/6.
 * FDIVRP ST(i),ST(0) - reversed divide and pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
15289
15290
/** Opcode 0xde 11/7.
 * FDIVP ST(i),ST(0) - divide and pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
15297
15298
15299/**
15300 * Common worker for FPU instructions working on ST0 and an m16i, and storing
15301 * the result in ST0.
15302 *
15303 * @param pfnAImpl Pointer to the instruction implementation (assembly).
15304 */
15305FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
15306{
15307 IEM_MC_BEGIN(3, 3);
15308 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
15309 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
15310 IEM_MC_LOCAL(int16_t, i16Val2);
15311 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
15312 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
15313 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
15314
15315 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
15316 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15317
15318 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15319 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15320 IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);
15321
15322 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
15323 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
15324 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
15325 IEM_MC_ELSE()
15326 IEM_MC_FPU_STACK_UNDERFLOW(0);
15327 IEM_MC_ENDIF();
15328 IEM_MC_USED_FPU();
15329 IEM_MC_ADVANCE_RIP();
15330
15331 IEM_MC_END();
15332 return VINF_SUCCESS;
15333}
15334
15335
/** Opcode 0xde !11/0.
 * FIADD m16i - add a 16-bit integer from memory to ST(0). */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
15342
15343
/** Opcode 0xde !11/1.
 * FIMUL m16i - multiply ST(0) by a 16-bit integer from memory. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
15350
15351
/** Opcode 0xde !11/2.
 * FICOM ST(0),m16i - compare ST(0) with a 16-bit integer; updates FSW only. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15384
15385
/** Opcode 0xde !11/3.
 * FICOMP ST(0),m16i - like FICOM above but pops ST(0) afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* Same as iemOp_ficom_m16i except the _THEN_POP FSW/underflow variants. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15418
15419
/** Opcode 0xde !11/4.
 * FISUB m16i - subtract a 16-bit integer from ST(0). */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
15426
15427
/** Opcode 0xde !11/5.
 * FISUBR m16i - reversed subtract: ST(0) = m16i - ST(0). */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
15434
15435
15436/** Opcode 0xde !11/6. */
15437FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
15438{
15439 IEMOP_MNEMONIC("fiadd m16i");
15440 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
15441}
15442
15443
15444/** Opcode 0xde !11/7. */
15445FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
15446{
15447 IEMOP_MNEMONIC("fiadd m16i");
15448 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
15449}
15450
15451
/** Opcode 0xde.
 * Escape group dispatcher: register forms are the popping arithmetic ops
 * (plus FCOMPP at 0xd9), memory forms work on a 16-bit integer operand. */
FNIEMOP_DEF(iemOp_EscF6)
{
    /* Remember where the 0xde escape byte is so FOP can be updated. */
    pIemCpu->offFpuOpcode = pIemCpu->offOpcode - 1;
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9)
                        return FNIEMOP_CALL(iemOp_fcompp);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15490
15491
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like ffree + fincstp: tag the
 * register as empty, then increment TOP (i.e. pop without storing). */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15513
15514
/** Opcode 0xdf 0xe0.
 * FNSTSW AX - store the FPU status word in AX (non-waiting form, so no
 * pending-FPU-exception check). */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC("fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
15530
15531
15532/** Opcode 0xdf 11/5. */
15533FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
15534{
15535 IEMOP_MNEMONIC("fcomip st0,stN");
15536 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
15537}
15538
15539
/** Opcode 0xdf 11/6.
 * FCOMIP ST(0),ST(i) - compare into EFLAGS, then pop; defers to the common
 * fcomi/fucomi C implementation with fPop set. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fcomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
15546
15547
/** Opcode 0xdf !11/0.
 * FILD m16i - convert a 16-bit integer from memory to 80-bit real and push. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* ST(7) must be free for the push; otherwise record stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15579
15580
/** Opcode 0xdf !11/1.
 * FISTTP m16i - store ST(0) as a 16-bit integer with truncation, then pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15615
15616
15617/** Opcode 0xdf !11/2. */
15618FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
15619{
15620 IEMOP_MNEMONIC("fistp m16i");
15621 IEM_MC_BEGIN(3, 2);
15622 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
15623 IEM_MC_LOCAL(uint16_t, u16Fsw);
15624 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
15625 IEM_MC_ARG(int16_t *, pi16Dst, 1);
15626 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
15627
15628 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
15629 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
15630 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
15631 IEM_MC_MAYBE_RAISE_FPU_XCPT();
15632
15633 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
15634 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
15635 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
15636 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
15637 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
15638 IEM_MC_ELSE()
15639 IEM_MC_IF_FCW_IM()
15640 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
15641 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
15642 IEM_MC_ENDIF();
15643 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
15644 IEM_MC_ENDIF();
15645 IEM_MC_USED_FPU();
15646 IEM_MC_ADVANCE_RIP();
15647
15648 IEM_MC_END();
15649 return VINF_SUCCESS;
15650}
15651
15652
/** Opcode 0xdf !11/3.
 * FISTP m16i - store ST(0) as a 16-bit integer, then pop. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15687
15688
/** Opcode 0xdf !11/4.
 * FBLD m80bcd - load packed BCD; not implemented yet (stub). */
FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
15691
15692
/** Opcode 0xdf !11/5.
 * FILD m64i - convert a 64-bit integer from memory to 80-bit real and push. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pIemCpu->iEffSeg, GCPtrEffSrc);

    /* ST(7) must be free for the push; otherwise record stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pIemCpu->iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15724
15725
/** Opcode 0xdf !11/6.
 * FBSTP m80bcd - store packed BCD and pop; not implemented yet (stub). */
FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
15728
15729
/** Opcode 0xdf !11/7.
 * FISTP m64i - store ST(0) as a 64-bit integer, then pop. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC("fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pIemCpu->iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pIemCpu->iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_USED_FPU();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
15764
15765
/** Opcode 0xdf - x87 escape group DF; dispatches on the mod and reg fields. */
FNIEMOP_DEF(iemOp_EscF7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form (mod == 3). */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 4: if (bRm == 0xe0)            /* Only DF E0 (fnstsw ax) is valid in this slot. */
                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
15803
15804
/** Opcode 0xe0 - loopne/loopnz Jb.
 * Decrements the counter register ([R|E]CX, selected by the effective address
 * size) and branches when the counter is non-zero AND ZF is clear. */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC("loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address-size prefix selects CX, ECX or RCX as the counter. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
15851
15852
/** Opcode 0xe1 - loope/loopz Jb.
 * Decrements the counter register ([R|E]CX, selected by the effective address
 * size) and branches when the counter is non-zero AND ZF is set. */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC("loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address-size prefix selects CX, ECX or RCX as the counter. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
15899
15900
/** Opcode 0xe2 - loop Jb.
 * Decrements the counter register ([R|E]CX) and branches while it is non-zero.
 * A displacement equal to -(instruction length) means the instruction jumps to
 * itself; that degenerate "spin the counter to zero" form is completed in one
 * go by clearing the counter and advancing RIP (LOOP does not touch EFLAGS,
 * so the end state is the same). */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC("loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */
    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            /* offOpcode is the number of opcode bytes consumed, i.e. the
               instruction length here; -(length) == i8Imm is a self-jump. */
            if (-(int8_t)pIemCpu->offOpcode != i8Imm)
            {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_IF_CX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* Self-jump: short-circuit the whole countdown. */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)pIemCpu->offOpcode != i8Imm)
            {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_IF_ECX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)pIemCpu->offOpcode != i8Imm)
            {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_IF_RCX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
15974
15975
/** Opcode 0xe3 - jcxz/jecxz/jrcxz Jb.
 * Branches when the counter register ([R|E]CX, selected by the effective
 * address size) is zero; the counter itself is not modified. */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC("jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pIemCpu->enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            /* Note the inverted test: non-zero falls through, zero jumps. */
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16019
16020
16021/** Opcode 0xe4 */
16022FNIEMOP_DEF(iemOp_in_AL_Ib)
16023{
16024 IEMOP_MNEMONIC("in eAX,Ib");
16025 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
16026 IEMOP_HLP_NO_LOCK_PREFIX();
16027 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
16028}
16029
16030
/** Opcode 0xe5 - in eAX, imm8. */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC("in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Access width follows the effective operand size: 2 bytes (AX) or
       4 bytes (EAX); there is no 8-byte port access. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16039
16040
/** Opcode 0xe6 - out imm8, AL (write AL to the immediate port). */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC("out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Defer to the C implementation: 1-byte access on port u8Imm. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
}
16049
16050
/** Opcode 0xe7 - out imm8, eAX. */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC("out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Access width follows the effective operand size: 2 (AX) or 4 (EAX). */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16059
16060
/** Opcode 0xe8 - call rel16/rel32 (near relative call). */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC("call Jv");
    /* In 64-bit mode the default operand size is 64-bit for this instruction. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            /* The encoded immediate is 32 bits, sign-extended to 64. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16089
16090
/** Opcode 0xe9 - jmp rel16/rel32 (near relative jump). */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC("jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S16(i16Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        /* 64-bit mode uses the same 32-bit displacement encoding, so both
           modes share the S32 path. */
        case IEMMODE_64BIT:
        case IEMMODE_32BIT:
        {
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S32(i32Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16120
16121
/** Opcode 0xea - jmp ptr16:16/ptr16:32 (direct far jump).
 * Invalid in 64-bit mode. */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC("jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation.
       The offset comes first in the instruction stream, then the selector. */
    uint32_t offSeg;
    if (pIemCpu->enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pIemCpu->enmEffOpSize);
}
16138
16139
/** Opcode 0xeb - jmp rel8 (short jump). */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC("jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_REL_JMP_S8(i8Imm);
    IEM_MC_END();
    return VINF_SUCCESS;
}
16153
16154
/** Opcode 0xec - in AL, DX (read one byte from the port in DX into AL). */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC("in AL,DX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}
16162
16163
/** Opcode 0xed - in eAX, DX.
 * NOTE(review): the function name lacks the "in_" prefix (cf. iemOp_in_AL_DX);
 * renaming would require touching the opcode dispatch table elsewhere, so it
 * is left as-is here. */
FNIEMOP_DEF(iemOp_eAX_DX)
{
    IEMOP_MNEMONIC("in eAX,DX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Access width follows the effective operand size: 2 (AX) or 4 (EAX). */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16171
16172
/** Opcode 0xee - out DX, AL (write AL to the port in DX). */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC("out DX,AL");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}
16180
16181
/** Opcode 0xef - out DX, eAX. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC("out DX,eAX");
    IEMOP_HLP_NO_LOCK_PREFIX();
    /* Access width follows the effective operand size: 2 (AX) or 4 (EAX). */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pIemCpu->enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
16189
16190
/** Opcode 0xf0 - lock prefix.
 * Records the prefix and continues decoding the next opcode byte via the
 * one-byte opcode table; whether LOCK is legal is decided by the individual
 * instruction handler (IEMOP_HLP_NO_LOCK_PREFIX & friends). */
FNIEMOP_DEF(iemOp_lock)
{
    /* Per the macro name, any REX state not immediately preceding the opcode
       is discarded here. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    pIemCpu->fPrefixes |= IEM_OP_PRF_LOCK;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16200
16201
/** Opcode 0xf1 - int1 / icebp. */
FNIEMOP_DEF(iemOp_int_1)
{
    IEMOP_MNEMONIC("int1"); /* icebp */
    /** @todo testcase! */
    /* Raises #DB; fIsBpInstr=false, so it is not treated like INT3/#BP. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
}
16209
16210
/** Opcode 0xf2 - repne/repnz prefix.
 * REPNE and REPE are mutually exclusive, so the REPZ bit is cleared before
 * setting REPNZ; decoding then continues via the one-byte opcode table. */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pIemCpu->fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pIemCpu->fPrefixes |= IEM_OP_PRF_REPNZ;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16222
16223
/** Opcode 0xf3 - repe/repz prefix.
 * REPE and REPNE are mutually exclusive, so the REPNZ bit is cleared before
 * setting REPZ; decoding then continues via the one-byte opcode table. */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pIemCpu->fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pIemCpu->fPrefixes |= IEM_OP_PRF_REPZ;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
16235
16236
16237/** Opcode 0xf4. */
16238FNIEMOP_DEF(iemOp_hlt)
16239{
16240 IEMOP_HLP_NO_LOCK_PREFIX();
16241 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
16242}
16243
16244
/** Opcode 0xf5 - cmc (complement carry flag; no other flags touched). */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC("cmc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16256
16257
/**
 * Common implementation of 'inc/dec/not/neg Eb'.
 *
 * @returns Strict VBox status code.
 * @param   bRm     The RM byte.
 * @param   pImpl   The instruction implementation (normal and locked 8-bit
 *                  worker function pointers).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint32_t *, pEFlags, 1);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        /* The operand is mapped read-write; when a LOCK prefix is present the
           locked variant of the worker is used instead of the normal one. */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
16301
16302
/**
 * Common implementation of 'inc/dec/not/neg Ev'.
 *
 * @returns Strict VBox status code.
 * @param   bRm     The RM byte.
 * @param   pImpl   The instruction implementation (normal and locked workers
 *                  for each operand size).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

    /* Memory we do here.  The operand is mapped read-write and the locked
       worker variant is selected when a LOCK prefix is present. */
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pIemCpu->fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16381
16382
/** Opcode 0xf6 /0 - test Eb, Ib.
 * AND of operand and immediate for flags only; the destination is not
 * written, hence the read-only memory mapping below. */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC("test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* The trailing 1 tells the effective-address calculation that one
           more opcode byte (the imm8) follows the ModR/M encoding. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
16430
16431
/** Opcode 0xf7 /0 - test Ev, Iv.
 * AND of operand and immediate for flags only; the destination is not
 * written, hence the read-only memory mappings below. */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("test Ev,Iv");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                /* The immediate is 32 bits, sign-extended to 64. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access.  The third argument to IEM_MC_CALC_RM_EFF_ADDR is
           the number of immediate bytes still to come (2, 4 and 4 - the
           64-bit form uses a sign-extended 32-bit immediate). */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pIemCpu->iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
16568
16569
16570/** Opcode 0xf6 /4, /5, /6 and /7. */
16571FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
16572{
16573 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
16574
16575 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
16576 {
16577 /* register access */
16578 IEMOP_HLP_NO_LOCK_PREFIX();
16579 IEM_MC_BEGIN(3, 1);
16580 IEM_MC_ARG(uint16_t *, pu16AX, 0);
16581 IEM_MC_ARG(uint8_t, u8Value, 1);
16582 IEM_MC_ARG(uint32_t *, pEFlags, 2);
16583 IEM_MC_LOCAL(int32_t, rc);
16584
16585 IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
16586 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
16587 IEM_MC_REF_EFLAGS(pEFlags);
16588 IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
16589 IEM_MC_IF_LOCAL_IS_Z(rc) {
16590 IEM_MC_ADVANCE_RIP();
16591 } IEM_MC_ELSE() {
16592 IEM_MC_RAISE_DIVIDE_ERROR();
16593 } IEM_MC_ENDIF();
16594
16595 IEM_MC_END();
16596 }
16597 else
16598 {
16599 /* memory access. */
16600 IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
16601
16602 IEM_MC_BEGIN(3, 2);
16603 IEM_MC_ARG(uint16_t *, pu16AX, 0);
16604 IEM_MC_ARG(uint8_t, u8Value, 1);
16605 IEM_MC_ARG(uint32_t *, pEFlags, 2);
16606 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
16607 IEM_MC_LOCAL(int32_t, rc);
16608
16609 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
16610 IEM_MC_FETCH_MEM_U8(u8Value, pIemCpu->iEffSeg, GCPtrEffDst);
16611 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
16612 IEM_MC_REF_EFLAGS(pEFlags);
16613 IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
16614 IEM_MC_IF_LOCAL_IS_Z(rc) {
16615 IEM_MC_ADVANCE_RIP();
16616 } IEM_MC_ELSE() {
16617 IEM_MC_RAISE_DIVIDE_ERROR();
16618 } IEM_MC_ENDIF();
16619
16620 IEM_MC_END();
16621 }
16622 return VINF_SUCCESS;
16623}
16624
16625
/**
 * Opcode 0xf7 /4, /5, /6 and /7 (mul, imul, div, idiv Ev).
 *
 * Common worker for the word/dword/qword multiply/divide group; operates on
 * the xAX/xDX register pair and a register or memory operand.
 * A non-zero return from the size-specific assembly worker means divide
 * error (\#DE).
 *
 * NOTE(review): IEMOP_HLP_NO_LOCK_PREFIX() is invoked both here and again in
 * every case below; the repeats are redundant but harmless.
 *
 * @returns Strict VBox status code.
 * @param   bRm     The RM byte.
 * @param   pImpl   The instruction implementation (size-specific workers).
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
{
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* Writes went through 32-bit references, so the upper
                       halves of RAX/RDX must be cleared explicitly here. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* See the register path: clear RAX/RDX high halves. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEMOP_HLP_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Value, pIemCpu->iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
16809
/** Opcode 0xf6 - group 3, byte operand; dispatches on the reg field. */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC("not Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC("neg Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
        /* The mul/div cases declare their undefined EFLAGS for verification
           mode before handing off to the common worker. */
        case 4:
            IEMOP_MNEMONIC("mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
        case 5:
            IEMOP_MNEMONIC("imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
        case 6:
            IEMOP_MNEMONIC("div Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
        case 7:
            IEMOP_MNEMONIC("idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16846
16847
/** Opcode 0xf7. */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    /* Group 3, word/dword/qword operand: dispatch on the ModR/M reg field (/0../7). */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            /* /0: TEST Ev,Iz - has an immediate operand, so it gets its own worker. */
            return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC("not Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC("neg Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC("mul Ev");
            /* SF/ZF/AF/PF are architecturally undefined after MUL; flag them for verification mode. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
        case 5:
            IEMOP_MNEMONIC("imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
        case 6:
            IEMOP_MNEMONIC("div Ev");
            /* All arithmetic flags are architecturally undefined after DIV/IDIV. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
        case 7:
            IEMOP_MNEMONIC("idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
16884
16885
/** Opcode 0xf8. */
FNIEMOP_DEF(iemOp_clc)
{
    /* CLC - clear the carry flag. */
    IEMOP_MNEMONIC("clc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16897
16898
/** Opcode 0xf9. */
FNIEMOP_DEF(iemOp_stc)
{
    /* STC - set the carry flag. */
    IEMOP_MNEMONIC("stc");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16910
16911
/** Opcode 0xfa. */
FNIEMOP_DEF(iemOp_cli)
{
    /* CLI - privilege/VME checks live in the C implementation, so defer to it. */
    IEMOP_MNEMONIC("cli");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
}
16919
16920
/** Opcode 0xfb. */
FNIEMOP_DEF(iemOp_sti)
{
    /* STI - privilege checks and interrupt shadow handling live in the C implementation. */
    IEMOP_MNEMONIC("sti");
    IEMOP_HLP_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
16927
16928
/** Opcode 0xfc. */
FNIEMOP_DEF(iemOp_cld)
{
    /* CLD - clear the direction flag. */
    IEMOP_MNEMONIC("cld");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16940
16941
/** Opcode 0xfd. */
FNIEMOP_DEF(iemOp_std)
{
    /* STD - set the direction flag. */
    IEMOP_MNEMONIC("std");
    IEMOP_HLP_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
16953
16954
16955/** Opcode 0xfe. */
16956FNIEMOP_DEF(iemOp_Grp4)
16957{
16958 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
16959 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
16960 {
16961 case 0:
16962 IEMOP_MNEMONIC("inc Ev");
16963 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
16964 case 1:
16965 IEMOP_MNEMONIC("dec Ev");
16966 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
16967 default:
16968 IEMOP_MNEMONIC("grp4-ud");
16969 return IEMOP_RAISE_INVALID_OPCODE();
16970 }
16971}
16972
16973
/**
 * Opcode 0xff /2.
 *
 * Near indirect CALL: the new RIP comes either from a general register
 * (mod == 3) or from a memory operand; the actual push/branch is done by
 * iemCImpl_call_16/32/64.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("calln Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. (Comment fixed; the code
           below calculates an effective address and fetches the target from it.) */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
17055
17056typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
17057
17058FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
17059{
17060 /* Registers? How?? */
17061 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
17062 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
17063
17064 /* Far pointer loaded from memory. */
17065 switch (pIemCpu->enmEffOpSize)
17066 {
17067 case IEMMODE_16BIT:
17068 IEM_MC_BEGIN(3, 1);
17069 IEM_MC_ARG(uint16_t, u16Sel, 0);
17070 IEM_MC_ARG(uint16_t, offSeg, 1);
17071 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
17072 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17073 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17074 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17075 IEM_MC_FETCH_MEM_U16(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
17076 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 2);
17077 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17078 IEM_MC_END();
17079 return VINF_SUCCESS;
17080
17081 case IEMMODE_64BIT:
17082 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
17083 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
17084 * and call far qword [rsp] encodings. */
17085 if (!IEM_IS_GUEST_CPU_AMD(pIemCpu))
17086 {
17087 IEM_MC_BEGIN(3, 1);
17088 IEM_MC_ARG(uint16_t, u16Sel, 0);
17089 IEM_MC_ARG(uint64_t, offSeg, 1);
17090 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
17091 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17092 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17093 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17094 IEM_MC_FETCH_MEM_U64(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
17095 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 8);
17096 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17097 IEM_MC_END();
17098 return VINF_SUCCESS;
17099 }
17100 /* AMD falls thru. */
17101
17102 case IEMMODE_32BIT:
17103 IEM_MC_BEGIN(3, 1);
17104 IEM_MC_ARG(uint16_t, u16Sel, 0);
17105 IEM_MC_ARG(uint32_t, offSeg, 1);
17106 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
17107 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
17108 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
17109 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
17110 IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
17111 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc, 4);
17112 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
17113 IEM_MC_END();
17114 return VINF_SUCCESS;
17115
17116 IEM_NOT_REACHED_DEFAULT_CASE_RET();
17117 }
17118}
17119
17120
/**
 * Opcode 0xff /3.
 *
 * Far indirect CALL through a sel:off pointer in memory; decoding is shared
 * with /5 via iemOpHlp_Grp5_far_Ep.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC("callf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
}
17130
17131
/**
 * Opcode 0xff /4.
 *
 * Near indirect JMP: the new RIP comes either from a general register
 * (mod == 3) or from a memory operand, and is set directly (no stack
 * activity, unlike /2 CALL).
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("jmpn Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pIemCpu->enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U16(u16Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U32(u32Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEM_MC_FETCH_MEM_U64(u64Target, pIemCpu->iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
17213
17214
/**
 * Opcode 0xff /5.
 *
 * Far indirect JMP through a sel:off pointer in memory; decoding is shared
 * with /3 via iemOpHlp_Grp5_far_Ep.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC("jmpf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
}
17224
17225
/**
 * Opcode 0xff /6.
 *
 * PUSH Ev: register forms go through the common push-GReg worker; memory
 * forms fetch the value and push it here.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC("push Ev");
    IEMOP_HLP_NO_LOCK_PREFIX(); /** @todo Too early? */

    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB);

    /* Memory we do here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pIemCpu->enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_FETCH_MEM_U16(u16Src, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_FETCH_MEM_U32(u32Src, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEM_MC_FETCH_MEM_U64(u64Src, pIemCpu->iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
17279
17280
/** Opcode 0xff. */
FNIEMOP_DEF(iemOp_Grp5)
{
    /* Group 5: dispatch on the ModR/M reg field (/0../7); only /7 is undefined. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC("inc Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC("dec Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);  /* near call */
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);  /* far call */
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);   /* near jmp */
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);   /* far jmp */
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            IEMOP_MNEMONIC("grp5-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    AssertFailedReturn(VERR_IEM_IPE_3); /* The switch above covers all 3-bit values; unreachable. */
}
17309
17310
17311
/** The one byte opcode dispatch table, indexed by the first opcode byte
 *  (0x00..0xff).  Forward declared at the top of the file. */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa, iemOp_bound_Gv_Ma_evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp_vex2, iemOp_lds_Gv_Mp_vex3, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int_3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_int_1, iemOp_repne, iemOp_repe,
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
17379
17380
17381/** @} */
17382
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette